[ 467.469590] env[61972]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61972) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 467.469924] env[61972]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61972) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 467.470033] env[61972]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61972) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 467.470353] env[61972]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 467.564450] env[61972]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61972) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 467.573823] env[61972]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=61972) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 468.179685] env[61972]: INFO nova.virt.driver [None req-c61e4267-99df-437a-91a3-a0ddbc87803c None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 468.249956] env[61972]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 468.250118] env[61972]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 468.250244] env[61972]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61972) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 471.476430] env[61972]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-5c1a081d-7a75-4ed0-8040-46abb2c8ed22 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.492698] env[61972]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61972) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 471.492889] env[61972]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-10d22e84-c14a-4886-9d01-c1acff0dd494 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.523861] env[61972]: INFO oslo_vmware.api [-] Successfully established new session; session ID is d6539.
[ 471.524024] env[61972]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.274s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 471.524656] env[61972]: INFO nova.virt.vmwareapi.driver [None req-c61e4267-99df-437a-91a3-a0ddbc87803c None None] VMware vCenter version: 7.0.3
[ 471.527980] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a4ec67-fd68-4b74-8e47-9d349ff29db0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.545136] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb2e6bc-816f-494c-b540-429c9edebfd2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.550898] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d2aa41-7fa5-4695-99ea-c876e73d4ceb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.557298] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109c22a4-4d92-4256-802b-c67cb9eb84fb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.570055] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7b402c-bcd8-4a2c-a2f6-bbf4ef45fd4a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.575673] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a5a1d0-0e90-4cc8-a66f-62bccbc748a9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.605532] env[61972]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-27c45c7c-6a99-4baa-a263-0da0a135aa66 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 471.610861] env[61972]: DEBUG nova.virt.vmwareapi.driver [None req-c61e4267-99df-437a-91a3-a0ddbc87803c None None] Extension org.openstack.compute already exists. {{(pid=61972) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 471.613558] env[61972]: INFO nova.compute.provider_config [None req-c61e4267-99df-437a-91a3-a0ddbc87803c None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 472.116816] env[61972]: DEBUG nova.context [None req-c61e4267-99df-437a-91a3-a0ddbc87803c None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),e4af889f-04d6-4445-bac4-987abbb60d18(cell1) {{(pid=61972) load_cells /opt/stack/nova/nova/context.py:464}}
[ 472.118964] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 472.119272] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 472.119953] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 472.120399] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Acquiring lock "e4af889f-04d6-4445-bac4-987abbb60d18" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 472.120590] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Lock "e4af889f-04d6-4445-bac4-987abbb60d18" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 472.121628] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Lock "e4af889f-04d6-4445-bac4-987abbb60d18" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 472.141675] env[61972]: INFO dbcounter [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Registered counter for database nova_cell0
[ 472.150020] env[61972]: INFO dbcounter [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Registered counter for database nova_cell1
[ 472.153348] env[61972]: DEBUG oslo_db.sqlalchemy.engines [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61972) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 472.153687] env[61972]: DEBUG oslo_db.sqlalchemy.engines [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61972) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 472.158497] env[61972]: ERROR nova.db.main.api [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 472.158497] env[61972]: result = function(*args, **kwargs)
[ 472.158497] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 472.158497] env[61972]: return func(*args, **kwargs)
[ 472.158497] env[61972]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 472.158497] env[61972]: result = fn(*args, **kwargs)
[ 472.158497] env[61972]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 472.158497] env[61972]: return f(*args, **kwargs)
[ 472.158497] env[61972]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 472.158497] env[61972]: return db.service_get_minimum_version(context, binaries)
[ 472.158497] env[61972]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 472.158497] env[61972]: _check_db_access()
[ 472.158497] env[61972]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 472.158497] env[61972]: stacktrace = ''.join(traceback.format_stack())
[ 472.158497] env[61972]:
[ 472.159343] env[61972]: ERROR nova.db.main.api [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 472.159343] env[61972]: result = function(*args, **kwargs)
[ 472.159343] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 472.159343] env[61972]: return func(*args, **kwargs)
[ 472.159343] env[61972]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 472.159343] env[61972]: result = fn(*args, **kwargs)
[ 472.159343] env[61972]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 472.159343] env[61972]: return f(*args, **kwargs)
[ 472.159343] env[61972]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 472.159343] env[61972]: return db.service_get_minimum_version(context, binaries)
[ 472.159343] env[61972]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 472.159343] env[61972]: _check_db_access()
[ 472.159343] env[61972]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 472.159343] env[61972]: stacktrace = ''.join(traceback.format_stack())
[ 472.159343] env[61972]:
[ 472.159711] env[61972]: WARNING nova.objects.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 472.159863] env[61972]: WARNING nova.objects.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Failed to get minimum service version for cell e4af889f-04d6-4445-bac4-987abbb60d18
[ 472.160278] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Acquiring lock "singleton_lock" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 472.160441] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Acquired lock "singleton_lock" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[
472.160681] env[61972]: DEBUG oslo_concurrency.lockutils [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Releasing lock "singleton_lock" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 472.160995] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Full set of CONF: {{(pid=61972) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 472.161176] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ******************************************************************************** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 472.161317] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Configuration options gathered from: {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 472.161460] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 472.161652] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 472.161781] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ================================================================================ {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 472.161985] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] allow_resize_to_same_host = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.162171] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] arq_binding_timeout = 300 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.162304] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] backdoor_port = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.162431] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] backdoor_socket = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.162592] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] block_device_allocate_retries = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.162750] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] block_device_allocate_retries_interval = 3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.162915] env[61972]: DEBUG 
oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cert = self.pem {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.163090] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.163260] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute_monitors = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.163430] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] config_dir = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.163598] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] config_drive_format = iso9660 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.163731] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.163894] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] config_source = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.164071] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] console_host = devstack {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.164240] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] control_exchange = nova {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.164398] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cpu_allocation_ratio = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.164556] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] daemon = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.164720] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] debug = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.164878] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] default_access_ip_network_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.165055] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] default_availability_zone = nova {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.165216] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] default_ephemeral_format = 
None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.165374] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] default_green_pool_size = 1000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.165608] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.165772] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] default_schedule_zone = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.165931] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] disk_allocation_ratio = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.166107] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] enable_new_services = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.166289] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] enabled_apis = ['osapi_compute'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.166480] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] enabled_ssl_apis = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.166646] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] flat_injected = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.166805] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] force_config_drive = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.166962] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] force_raw_images = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.167156] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] graceful_shutdown_timeout = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.167320] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] heal_instance_info_cache_interval = 60 {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.167530] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] host = cpu-1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.167704] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.167866] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] initial_disk_allocation_ratio = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.168038] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] initial_ram_allocation_ratio = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.168254] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.168417] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] instance_build_timeout = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.168572] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] instance_delete_interval = 300 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.168736] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] instance_format = [instance: %(uuid)s] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.168900] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] instance_name_template = instance-%08x {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.169105] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] instance_usage_audit = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.169305] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] instance_usage_audit_period = month {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.169476] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.169645] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] instances_path = /opt/stack/data/nova/instances {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.169811] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] internal_service_availability_zone = internal {{(pid=61972) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.169969] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] key = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.170146] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] live_migration_retry_count = 30 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.170315] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_color = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.170480] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_config_append = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.170646] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.170804] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_dir = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.170964] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.171141] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_options = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.171333] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_rotate_interval = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.171511] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_rotate_interval_type = days {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.171681] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] log_rotation_type = none {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.171811] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.171936] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.172117] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.172284] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.172413] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.172572] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] long_rpc_timeout = 1800 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.172731] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] max_concurrent_builds = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.172888] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] max_concurrent_live_migrations = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.173058] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] max_concurrent_snapshots = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.173223] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] max_local_block_devices = 3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.173381] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] max_logfile_count = 30 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.173540] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] max_logfile_size_mb = 200 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.173698] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] maximum_instance_delete_attempts = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.173861] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] metadata_listen = 0.0.0.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.174033] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] metadata_listen_port = 8775 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.174205] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] metadata_workers = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.174365] env[61972]: DEBUG oslo_service.service 
[None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] migrate_max_retries = -1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.174527] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] mkisofs_cmd = genisoimage {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.174757] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] my_block_storage_ip = 10.180.1.21 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.174897] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] my_ip = 10.180.1.21 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.175115] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.175281] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] network_allocate_retries = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.175464] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.175629] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] osapi_compute_listen = 0.0.0.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.175787] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] osapi_compute_listen_port = 8774 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.175948] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] osapi_compute_unique_server_name_scope = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.176127] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] osapi_compute_workers = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.176287] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] password_length = 12 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.176441] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] periodic_enable = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.176594] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] periodic_fuzzy_delay = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.176758] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] pointer_model = usbtablet 
{{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.176917] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] preallocate_images = none {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.177084] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] publish_errors = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.177214] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] pybasedir = /opt/stack/nova {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.177366] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ram_allocation_ratio = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.177565] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] rate_limit_burst = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.177707] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] rate_limit_except_level = CRITICAL {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.177830] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] rate_limit_interval = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.177983] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] reboot_timeout = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.178150] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] reclaim_instance_interval = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.178304] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] record = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.178494] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] reimage_timeout_per_gb = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.178666] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] report_interval = 120 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.178824] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] rescue_timeout = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.178978] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] reserved_host_cpus = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.179169] env[61972]: DEBUG oslo_service.service [None 
req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] reserved_host_disk_mb = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.179330] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] reserved_host_memory_mb = 512 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.179489] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] reserved_huge_pages = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.179645] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] resize_confirm_window = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.179802] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] resize_fs_using_block_device = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.179955] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] resume_guests_state_on_host_boot = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.180135] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.180297] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] rpc_response_timeout = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.180452] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] run_external_periodic_tasks = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.180617] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] running_deleted_instance_action = reap {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.180772] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] running_deleted_instance_poll_interval = 1800 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.180926] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] running_deleted_instance_timeout = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.181093] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler_instance_sync_interval = 120 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.181289] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_down_time = 720 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.181464] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] 
servicegroup_driver = db {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.181619] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] shell_completion = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.181777] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] shelved_offload_time = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.181933] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] shelved_poll_interval = 3600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.182109] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] shutdown_timeout = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.182270] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] source_is_ipv6 = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.182434] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ssl_only = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.182674] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.182838] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] sync_power_state_interval = 600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.182995] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] sync_power_state_pool_size = 1000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.183204] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] syslog_log_facility = LOG_USER {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.183379] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] tempdir = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.183540] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] timeout_nbd = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.183704] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] transport_url = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.183863] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] update_resources_interval = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.184030] env[61972]: DEBUG 
oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] use_cow_images = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.184193] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] use_eventlog = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.184351] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] use_journal = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.184507] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] use_json = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.184662] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] use_rootwrap_daemon = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.184815] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] use_stderr = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.184969] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] use_syslog = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.185136] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vcpu_pin_set = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.185301] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plugging_is_fatal = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.185463] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plugging_timeout = 300 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.185624] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] virt_mkfs = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.185780] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] volume_usage_poll_interval = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.185938] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] watch_log_file = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.186115] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] web = /usr/share/spice-html5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 472.186301] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.186466] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.186629] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.186795] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_concurrency.disable_process_locking = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.187090] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.187274] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.187440] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.187610] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.187779] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.187942] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.188134] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.auth_strategy = keystone {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.188306] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.compute_link_prefix = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.188479] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.188655] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.dhcp_domain = novalocal {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
472.188823] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.enable_instance_password = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.188988] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.glance_link_prefix = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.189213] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.189406] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.189564] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.instance_list_per_project_cells = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.189725] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.list_records_by_skipping_down_cells = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.189888] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.local_metadata_per_cell = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.190072] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.max_limit = 1000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.190246] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.metadata_cache_expiration = 15 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.190421] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.neutron_default_tenant_id = default {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.190596] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.response_validation = warn {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.190765] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.use_neutron_default_nets = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.190943] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.191123] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.191315] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.191497] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.191669] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.vendordata_dynamic_targets = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.191834] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.vendordata_jsonfile_path = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.192023] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.192221] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.backend = dogpile.cache.memcached {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.192390] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.backend_argument = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.192560] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.config_prefix = cache.oslo {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.192727] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.dead_timeout = 60.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.192889] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.debug_cache_backend = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.193107] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.enable_retry_client = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.193299] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.enable_socket_keepalive = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.193479] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.enabled = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.193641] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.enforce_fips_mode = False {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.193804] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.expiration_time = 600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.193966] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.hashclient_retry_attempts = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.194149] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.hashclient_retry_delay = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.194318] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_dead_retry = 300 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.194480] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_password = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.194642] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.194805] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.194967] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_pool_maxsize = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.195142] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.195344] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_sasl_enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.195530] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.195699] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_socket_timeout = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.195861] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.memcache_username = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.196039] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.proxies = [] {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.196211] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.redis_db = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.196375] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.redis_password = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.196541] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.redis_sentinel_service_name = mymaster {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.196714] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.196883] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.redis_server = localhost:6379 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.197058] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.redis_socket_timeout = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.197223] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.redis_username = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.197385] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.retry_attempts = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.197548] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.retry_delay = 0.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.197712] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.socket_keepalive_count = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.197872] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.socket_keepalive_idle = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.198042] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.socket_keepalive_interval = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.198206] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.tls_allowed_ciphers = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.198365] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.tls_cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.198521] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.tls_certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.198680] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.tls_enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.198837] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cache.tls_keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.199013] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.199229] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.auth_type = password {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.199402] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.199579] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.catalog_info = volumev3::publicURL {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.199738] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.199900] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.200075] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.cross_az_attach = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.200283] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.debug = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.200401] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.endpoint_template = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.200567] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.http_retries = 3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.200728] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.200885] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.keyfile = None {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.201080] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.os_region_name = RegionOne {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.201289] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.201458] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cinder.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.201630] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.201791] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.cpu_dedicated_set = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.201951] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.cpu_shared_set = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.202135] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.image_type_exclude_list = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.202305] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.202492] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.max_concurrent_disk_ops = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.202631] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.max_disk_devices_to_attach = -1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.202791] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.202957] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.203132] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.resource_provider_association_refresh = 300 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.203294] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.203454] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.shutdown_retry_interval = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.203629] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.203806] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] conductor.workers = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.203980] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] console.allowed_origins = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.204155] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] console.ssl_ciphers = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.204327] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] console.ssl_minimum_version = default {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.204493] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] consoleauth.enforce_session_timeout = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.204661] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] consoleauth.token_ttl = 600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.204829] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.204989] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.205168] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.205330] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.connect_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.205487] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.connect_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.205644] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.endpoint_override = None 
{{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.205804] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.205960] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.206132] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.max_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.206291] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.min_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.206447] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.region_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.206604] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.retriable_status_codes = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.206759] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.service_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.206926] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.service_type = accelerator {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.207096] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.207287] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.status_code_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.207466] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.status_code_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.207626] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.207807] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.207966] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] cyborg.version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
472.208161] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.backend = sqlalchemy {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.208333] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.connection = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.208497] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.connection_debug = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.208660] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.connection_parameters = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.208822] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.connection_recycle_time = 3600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.208981] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.connection_trace = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.209225] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.db_inc_retry_interval = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.209419] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.db_max_retries = 20 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.209592] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.db_max_retry_interval = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.209748] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.db_retry_interval = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.209911] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.max_overflow = 50 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.210088] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.max_pool_size = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.210255] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.max_retries = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.210426] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.210584] env[61972]: DEBUG oslo_service.service [None 
req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.mysql_wsrep_sync_wait = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.210740] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.pool_timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.210901] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.retry_interval = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.211071] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.slave_connection = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.211280] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.sqlite_synchronous = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.211446] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] database.use_db_reconnect = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.211629] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.backend = sqlalchemy {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.211796] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.connection = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.211958] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.connection_debug = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.212221] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.connection_parameters = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.212305] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.connection_recycle_time = 3600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.212467] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.connection_trace = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.212629] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.db_inc_retry_interval = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.212790] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.db_max_retries = 20 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.212955] env[61972]: DEBUG oslo_service.service [None 
req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.db_max_retry_interval = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.213161] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.db_retry_interval = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.213373] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.max_overflow = 50 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.213545] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.max_pool_size = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.213708] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.max_retries = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.213878] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.214048] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.214212] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.pool_timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.214374] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.retry_interval = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.214531] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.slave_connection = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.214691] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] api_database.sqlite_synchronous = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.214889] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] devices.enabled_mdev_types = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.215081] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.215257] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ephemeral_storage_encryption.default_format = luks {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.215422] env[61972]: DEBUG oslo_service.service [None 
req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ephemeral_storage_encryption.enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.215582] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.215749] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.api_servers = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.215911] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.216121] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.216325] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.216489] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.connect_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.216649] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.connect_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.216811] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.debug = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.216978] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.default_trusted_certificate_ids = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.217154] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.enable_certificate_validation = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.217324] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.enable_rbd_download = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.217486] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.endpoint_override = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.217653] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.217818] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.keyfile = None 
{{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.217977] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.max_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.218152] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.min_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.218318] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.num_retries = 3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.218489] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.rbd_ceph_conf = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.218651] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.rbd_connect_timeout = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.218818] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.rbd_pool = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.218983] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.rbd_user = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.219188] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.region_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.219360] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.retriable_status_codes = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.219522] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.service_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.219690] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.service_type = image {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.219861] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.220032] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.status_code_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.220196] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.status_code_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.220356] env[61972]: DEBUG 
oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.220539] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.220699] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.verify_glance_signatures = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.220857] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] glance.version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.221030] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] guestfs.debug = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.221233] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.221407] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.auth_type = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.221567] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.221724] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.221884] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.222055] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.connect_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.222219] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.connect_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.222378] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.endpoint_override = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.222538] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.222695] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.keyfile = None {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.222851] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.max_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.223019] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.min_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.223183] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.region_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.223341] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.retriable_status_codes = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.223496] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.service_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.223665] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.service_type = shared-file-system {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.223828] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.share_apply_policy_timeout = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.223996] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.224169] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.status_code_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.224326] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.status_code_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.224480] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.224659] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.224817] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] manila.version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.224982] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] mks.enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.225351] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.225543] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] image_cache.manager_interval = 2400 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.225713] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] image_cache.precache_concurrency = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.225881] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] image_cache.remove_unused_base_images = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.226060] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.226231] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.226405] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] image_cache.subdirectory_name = _base {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.226578] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.api_max_retries = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.226741] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.api_retry_interval = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.226902] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.227081] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.auth_type = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.227244] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.227405] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.227566] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.227728] env[61972]: DEBUG 
oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.conductor_group = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.227884] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.connect_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.228056] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.connect_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.228219] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.endpoint_override = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.228379] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.228534] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.228687] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.max_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.228917] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.min_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.229225] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.peer_list = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.229439] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.region_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.229619] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.retriable_status_codes = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.229785] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.serial_console_state_timeout = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.229948] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.service_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.230199] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.service_type = baremetal {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.230533] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.shard = None {{(pid=61972) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.230761] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.230935] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.status_code_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.231116] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.status_code_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.231304] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.231494] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.231658] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ironic.version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.231840] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.232025] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] key_manager.fixed_key = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.232240] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.232432] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.barbican_api_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.232653] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.barbican_endpoint = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.232985] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.barbican_endpoint_type = public {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.233230] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.barbican_region_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.233414] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.cafile = None {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.233580] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.233746] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.233912] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.234088] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.234256] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.number_of_retries = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.234419] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.retry_delay = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.234580] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.send_service_user_token = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.234742] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.234900] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.235071] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.verify_ssl = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.235236] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican.verify_ssl_path = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.235406] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.235570] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.auth_type = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.235726] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.235883] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.236056] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.236231] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.236454] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.236630] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.236791] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] barbican_service_user.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.236960] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.approle_role_id = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.237136] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.approle_secret_id = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.237309] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.kv_mountpoint = secret {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.237469] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.kv_path = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.237629] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.kv_version = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.237785] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.namespace = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.237939] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.root_token_id = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.238156] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.ssl_ca_crt_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.238384] env[61972]: DEBUG oslo_service.service [None 
req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.timeout = 60.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.238599] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.use_ssl = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.238787] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.239012] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.239212] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.auth_type = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.239378] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.239613] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.239878] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.240038] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.connect_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.240210] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.connect_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.240370] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.endpoint_override = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.240531] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.240689] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.240844] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.max_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.240999] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.min_version = None {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.241194] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.region_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.241366] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.retriable_status_codes = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.241524] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.service_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.241695] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.service_type = identity {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.241858] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.242026] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.status_code_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.242209] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.status_code_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.242375] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.242555] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.242711] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] keystone.version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.242908] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.connection_uri = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.243082] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.cpu_mode = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.243253] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.cpu_model_extra_flags = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.243419] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.cpu_models = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
472.243588] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.cpu_power_governor_high = performance {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.243754] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.cpu_power_governor_low = powersave {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.243913] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.cpu_power_management = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.244093] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.244261] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.device_detach_attempts = 8 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.244422] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.device_detach_timeout = 20 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.244583] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.disk_cachemodes = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.244738] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.disk_prefix = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.244905] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.enabled_perf_events = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.245081] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.file_backed_memory = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.245255] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.gid_maps = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.245416] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.hw_disk_discard = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.245571] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.hw_machine_type = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.245736] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.images_rbd_ceph_conf = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.245896] env[61972]: DEBUG 
oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.246067] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.246239] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.images_rbd_glance_store_name = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.246407] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.images_rbd_pool = rbd {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.246572] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.images_type = default {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.246727] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.images_volume_group = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.246884] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.inject_key = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.247075] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.inject_partition = -2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.247263] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.inject_password = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.247428] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.iscsi_iface = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.247586] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.iser_use_multipath = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.247746] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_bandwidth = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.247904] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.248075] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_downtime = 500 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.248240] env[61972]: DEBUG oslo_service.service [None 
req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.248402] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.248560] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_inbound_addr = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.248719] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.248877] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_permit_post_copy = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.249045] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_scheme = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.249221] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_timeout_action = abort {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.249379] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_tunnelled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.249535] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_uri = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.249693] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.live_migration_with_native_tls = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.249847] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.max_queues = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.250014] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.250259] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.250426] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.nfs_mount_options = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.250719] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.250885] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.251059] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.num_iser_scan_tries = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.251243] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.num_memory_encrypted_guests = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.251411] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.251570] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.num_pcie_ports = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.251733] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.num_volume_scan_tries = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.251895] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.pmem_namespaces = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.252062] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.quobyte_client_cfg = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.252369] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.252706] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rbd_connect_timeout = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.252904] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.253096] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.253292] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rbd_secret_uuid = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
472.253482] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rbd_user = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.253680] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.253867] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.remote_filesystem_transport = ssh {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.254035] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rescue_image_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.254202] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rescue_kernel_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.254362] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rescue_ramdisk_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.254530] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.254689] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.rx_queue_size = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.254854] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.smbfs_mount_options = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.255158] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.255348] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.snapshot_compression = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.255504] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.snapshot_image_format = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.255721] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.255886] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.sparse_logical_volumes = False {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.256060] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.swtpm_enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.256236] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.swtpm_group = tss {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.256405] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.swtpm_user = tss {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.256572] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.sysinfo_serial = unique {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.256734] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.tb_cache_size = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.256887] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.tx_queue_size = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.257061] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.uid_maps = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.257225] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.use_virtio_for_bridges = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.257393] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.virt_type = kvm {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.257557] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.volume_clear = zero {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.257718] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.volume_clear_size = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.257882] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.volume_use_multipath = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.258050] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.vzstorage_cache_path = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.258229] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.258397] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.vzstorage_mount_group = qemu {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.258560] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.vzstorage_mount_opts = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.258727] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.259095] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.259417] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.vzstorage_mount_user = stack {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.259564] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.259741] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.259915] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.auth_type = password {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.260092] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.260257] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.260420] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.260578] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.connect_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.260739] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.connect_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.260954] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.default_floating_pool = public {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.261099] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.endpoint_override = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.261313] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.extension_sync_interval = 600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.261485] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.http_retries = 3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.261648] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.261808] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.261965] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.max_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.262271] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.262455] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.min_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.262628] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.ovs_bridge = br-int {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.262795] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.physnets = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.262962] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.region_name = RegionOne {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.263137] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.retriable_status_codes = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.263306] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.service_metadata_proxy = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.263518] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.service_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.263709] env[61972]: 
DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.service_type = network {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.263870] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.264037] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.status_code_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.264202] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.status_code_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.264358] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.264536] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.264698] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] neutron.version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.264867] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] notifications.bdms_in_notifications = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.265089] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] notifications.default_level = INFO {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.265235] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] notifications.notification_format = unversioned {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.265400] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] notifications.notify_on_state_change = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.265572] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.265746] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] pci.alias = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.265912] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] pci.device_spec = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.266090] env[61972]: DEBUG 
oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] pci.report_in_placement = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.266269] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.266557] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.auth_type = password {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.266611] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.266769] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.266924] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.267099] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.267261] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.connect_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.267423] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.connect_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.267668] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.default_domain_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.267855] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.default_domain_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.268023] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.domain_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.268188] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.domain_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.268350] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.endpoint_override = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.268512] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None 
None] placement.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.268671] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.268827] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.max_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.268983] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.min_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.269271] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.password = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.269422] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.project_domain_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.269592] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.project_domain_name = Default {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.269760] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.project_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.269932] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.project_name = service {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.270154] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.region_name = RegionOne {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.270280] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.retriable_status_codes = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.270441] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.service_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.270656] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.service_type = placement {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.270773] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.270931] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.status_code_retries = None {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.271207] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.status_code_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.271396] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.system_scope = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.271556] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.271715] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.trust_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.271872] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.user_domain_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.272049] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.user_domain_name = Default {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.272236] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.user_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.272417] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.username = nova {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.272600] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.272761] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] placement.version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.272939] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.cores = 20 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.273121] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.count_usage_from_placement = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.273298] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.273536] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.injected_file_content_bytes = 10240 {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.273659] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.injected_file_path_length = 255 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.273828] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.injected_files = 5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.273994] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.instances = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.274176] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.key_pairs = 100 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.274345] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.metadata_items = 128 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.274510] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.ram = 51200 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.274674] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.recheck_quota = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.274845] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.server_group_members = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.275015] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] quota.server_groups = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.275199] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.275364] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.275525] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.image_metadata_prefilter = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.275686] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.275905] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.max_attempts = 3 {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.276111] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.max_placement_results = 1000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.276284] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.276448] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.query_placement_for_image_type_support = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.276614] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.276787] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] scheduler.workers = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.276957] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.277144] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.277332] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.277496] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.277660] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.277823] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.277988] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.278192] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.278362] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.host_subset_size = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.278525] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.278696] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.278849] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.279023] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.isolated_hosts = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.279211] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.isolated_images = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.279385] env[61972]: DEBUG oslo_service.service [None 
req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.279549] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.279710] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.279871] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.pci_in_placement = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.280042] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.280251] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.280368] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.280541] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.280702] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.280861] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.281032] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.track_instance_changes = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.281280] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.281416] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] metrics.required = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.281582] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] metrics.weight_multiplier = 1.0 
{{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.281748] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.281913] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] metrics.weight_setting = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.282266] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.282449] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] serial_console.enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.282628] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] serial_console.port_range = 10000:20000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.282801] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.282968] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.283151] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] serial_console.serialproxy_port = 6083 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.283320] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.283495] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.auth_type = password {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.283657] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.283813] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.283974] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.284151] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.insecure = False {{(pid=61972) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.284313] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.284484] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.send_service_user_token = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.284648] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.284806] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] service_user.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.284976] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.agent_enabled = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.285155] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.285456] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.285647] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.285871] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.html5proxy_port = 6082 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.285980] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.image_compression = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.286154] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.jpeg_compression = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.286315] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.playback_compression = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.286476] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.require_secure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.286641] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.server_listen = 127.0.0.1 {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.286807] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.286963] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.streaming_mode = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.287134] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] spice.zlib_compression = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.287301] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] upgrade_levels.baseapi = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.287468] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] upgrade_levels.compute = auto {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.287627] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] upgrade_levels.conductor = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.287783] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] upgrade_levels.scheduler = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.287946] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.auth_section = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.288118] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.auth_type = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.288277] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.288434] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.288591] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.288748] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.288903] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.keyfile = None {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.289074] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.289260] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vendordata_dynamic_auth.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.289442] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.api_retry_count = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.289603] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.ca_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.289770] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.cache_prefix = devstack-image-cache {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.289935] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.cluster_name = testcl1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.290111] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.connection_pool_size = 10 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.290272] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.console_delay_seconds = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.290442] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.datastore_regex = ^datastore.* {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.290645] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.290820] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.host_password = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.290984] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.host_port = 443 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.291187] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.host_username = administrator@vsphere.local {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.291373] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.insecure = True {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.291534] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.integration_bridge = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.291699] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.maximum_objects = 100 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.291859] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.pbm_default_policy = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.292032] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.pbm_enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.292197] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.pbm_wsdl_location = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.292366] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.292526] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.serial_port_proxy_uri = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.292683] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.serial_port_service_uri = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.292850] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.task_poll_interval = 0.5 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.293030] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.use_linked_clone = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.293207] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.vnc_keymap = en-us {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.293375] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.vnc_port = 5900 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.293542] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vmware.vnc_port_total = 10000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.293729] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.auth_schemes = ['none'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.293895] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.294196] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.294386] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.294556] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.novncproxy_port = 6080 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.294732] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.server_listen = 127.0.0.1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.294900] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.295071] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.vencrypt_ca_certs = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.295232] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.vencrypt_client_cert = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.295392] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vnc.vencrypt_client_key = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.295561] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.295721] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.disable_deep_image_inspection = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.295879] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.296049] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.296211] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.296372] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.disable_rootwrap = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.296530] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.enable_numa_live_migration = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.296690] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.296849] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.297024] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.297184] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.libvirt_disable_apic = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.297344] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.297503] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.297661] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.297822] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.297981] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.298160] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.298322] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.298480] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.298641] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.298806] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.298990] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.299192] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.client_socket_timeout = 900 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.299370] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.default_pool_size = 1000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.299538] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.keep_alive = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.299707] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.max_header_line = 16384 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.299869] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.secure_proxy_ssl_header = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.300039] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.ssl_ca_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.300208] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.ssl_cert_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.300370] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.ssl_key_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.300537] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.tcp_keepidle = 600 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.300714] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.300888] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] zvm.ca_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.301056] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] zvm.cloud_connector_url = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.301372] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.301550] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] zvm.reachable_timeout = 300 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.301731] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.enforce_new_defaults = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.302114] env[61972]: WARNING oslo_config.cfg [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 472.302323] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.enforce_scope = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.302507] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.policy_default_rule = default {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.302691] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.302869] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.policy_file = policy.yaml {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.303055] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.303224] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.303387] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.303545] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.303706] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.303873] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.304061] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.304243] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.connection_string = messaging:// {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.304412] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.enabled = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.304581] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.es_doc_type = notification 
{{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.304750] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.es_scroll_size = 10000 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.304916] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.es_scroll_time = 2m {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.305092] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.filter_error_trace = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.305265] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.hmac_keys = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.305520] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.sentinel_service_name = mymaster {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.305693] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.socket_timeout = 0.1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.305857] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.trace_requests = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.306032] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler.trace_sqlalchemy = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.306219] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler_jaeger.process_tags = {} {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.306384] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler_jaeger.service_name_prefix = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.306549] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] profiler_otlp.service_name_prefix = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.306707] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] remote_debug.host = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.306866] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] remote_debug.port = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.307056] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.307225] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.307390] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.307551] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.307712] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.307869] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.308039] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.308204] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.308365] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.308536] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.308694] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.308863] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.309040] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.309232] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.309414] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.309583] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.309746] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.309919] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.310094] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.310257] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.310422] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.310593] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.310755] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.310921] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.311092] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.311281] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.311449] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.311610] env[61972]: DEBUG oslo_service.service [None 
req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.311776] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.311941] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.ssl = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.312131] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.312307] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.312470] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.312639] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.312810] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.ssl_version = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.312972] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.313177] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.313346] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_notifications.retry = -1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.313530] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.313703] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_messaging_notifications.transport_url = **** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.313875] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.auth_section = None {{(pid=61972) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.314039] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.auth_type = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.314201] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.cafile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.314360] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.certfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.314519] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.collect_timing = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.314676] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.connect_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.314831] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.connect_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.314985] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.endpoint_id = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.315153] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.endpoint_override = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.315313] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.insecure = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.315469] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.keyfile = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.315622] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.max_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.315775] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.min_version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.315928] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.region_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.316099] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.retriable_status_codes = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.316258] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.service_name = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.316415] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.service_type = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.316572] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.split_loggers = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.316726] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.status_code_retries = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.316881] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.status_code_retry_delay = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.317045] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.timeout = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.317204] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.valid_interfaces = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.317362] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_limit.version = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.317525] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_reports.file_event_handler = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.317687] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.317846] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] oslo_reports.log_dir = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.318022] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.318191] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.318352] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.318517] 
env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.318680] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.318838] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.319011] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.319204] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_ovs_privileged.group = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.319377] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.319542] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.319704] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.319864] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] vif_plug_ovs_privileged.user = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.320048] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_linux_bridge.flat_interface = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.320233] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.320450] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.320578] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.320748] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.320914] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.321102] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.321306] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.321502] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.321677] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_ovs.isolate_vif = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.321845] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.322035] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.322224] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.322396] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_ovs.ovsdb_interface = native {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.322559] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] os_vif_ovs.per_port_bridge = False {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.322747] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] privsep_osbrick.capabilities = [21] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.322919] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] privsep_osbrick.group = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.323091] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] privsep_osbrick.helper_command = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.323261] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.323427] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.323584] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] privsep_osbrick.user = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.323754] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.323911] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] nova_sys_admin.group = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.324080] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] nova_sys_admin.helper_command = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.324247] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.324408] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.324566] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] nova_sys_admin.user = None {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 472.324697] env[61972]: DEBUG oslo_service.service [None req-871e997c-c2d8-4ce5-b5a5-9c85ab684f1a None None] ******************************************************************************** {{(pid=61972) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 472.325205] env[61972]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 472.829064] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Getting list of instances from cluster (obj){ [ 472.829064] env[61972]: value = "domain-c8" [ 472.829064] env[61972]: _type = "ClusterComputeResource" [ 472.829064] env[61972]: } {{(pid=61972) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 472.830179] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f0d50a-4d7d-4c3a-9376-fde841bd7809 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 472.839269] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Got total of 0 instances {{(pid=61972) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 472.839784] env[61972]: WARNING nova.virt.vmwareapi.driver [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 472.840253] env[61972]: INFO nova.virt.node [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Generated node identity 2f34b92c-91e8-4983-ae34-7426fcec3157 [ 472.840487] env[61972]: INFO nova.virt.node [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Wrote node identity 2f34b92c-91e8-4983-ae34-7426fcec3157 to /opt/stack/data/n-cpu-1/compute_id [ 473.343491] env[61972]: WARNING nova.compute.manager [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Compute nodes ['2f34b92c-91e8-4983-ae34-7426fcec3157'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 474.349698] env[61972]: INFO nova.compute.manager [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 475.356077] env[61972]: WARNING nova.compute.manager [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 475.356415] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 475.356623] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 475.356787] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 475.357183] env[61972]: DEBUG nova.compute.resource_tracker [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 475.357875] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d089b6-91ba-4fbc-9423-952b791e07db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.366360] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d349a900-fa1c-44ee-b0ea-dc9b6b463e49 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.380051] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1c88424a-cc3c-476f-9c52-6e151f56c459 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.386553] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2593be7-dfe3-4ce3-89f0-77b41b57bf74 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 475.415736] env[61972]: DEBUG nova.compute.resource_tracker [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181399MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 475.415894] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 475.416092] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 475.918417] env[61972]: WARNING nova.compute.resource_tracker [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] No compute node record for cpu-1:2f34b92c-91e8-4983-ae34-7426fcec3157: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 2f34b92c-91e8-4983-ae34-7426fcec3157 could not be found. [ 476.422531] env[61972]: INFO nova.compute.resource_tracker [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 2f34b92c-91e8-4983-ae34-7426fcec3157 [ 477.930802] env[61972]: DEBUG nova.compute.resource_tracker [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 477.931302] env[61972]: DEBUG nova.compute.resource_tracker [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 478.086428] env[61972]: INFO nova.scheduler.client.report [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] [req-80cbc134-19ce-464c-87dd-2048aa2070b1] Created resource provider record via placement API for resource provider with UUID 2f34b92c-91e8-4983-ae34-7426fcec3157 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 478.102968] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d02b4ac-9cee-48db-9b88-c53ece2c28ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.110836] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb406676-9799-47e8-afbd-77065885eb92 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.141510] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c03b0f5-7d0b-4d68-a783-221bb621f1ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.148796] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235c3a24-a8ae-4ff9-b41e-9165dfb81bea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.161564] env[61972]: DEBUG nova.compute.provider_tree [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 478.696893] env[61972]: DEBUG nova.scheduler.client.report [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Updated inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 478.697142] env[61972]: DEBUG nova.compute.provider_tree [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Updating resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 generation from 0 to 1 during operation: update_inventory {{(pid=61972) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 478.697287] env[61972]: DEBUG nova.compute.provider_tree [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 478.750220] env[61972]: DEBUG nova.compute.provider_tree [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Updating 
resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 generation from 1 to 2 during operation: update_traits {{(pid=61972) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 479.255359] env[61972]: DEBUG nova.compute.resource_tracker [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 479.255719] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.839s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 479.255719] env[61972]: DEBUG nova.service [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Creating RPC server for service compute {{(pid=61972) start /opt/stack/nova/nova/service.py:186}} [ 479.269346] env[61972]: DEBUG nova.service [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] Join ServiceGroup membership for this service compute {{(pid=61972) start /opt/stack/nova/nova/service.py:203}} [ 479.269539] env[61972]: DEBUG nova.servicegroup.drivers.db [None req-2cbbde69-ea70-4652-80c5-b6a83308ae9d None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61972) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 512.661590] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "e3a373f2-640f-479d-98f6-963a5fbc38ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.661590] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "e3a373f2-640f-479d-98f6-963a5fbc38ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.164142] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 513.715322] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.715725] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.718347] env[61972]: INFO nova.compute.claims [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 514.272135] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._sync_power_states {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.778098] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Getting list of instances from cluster (obj){ [ 514.778098] env[61972]: value = "domain-c8" [ 514.778098] env[61972]: _type = "ClusterComputeResource" [ 514.778098] env[61972]: } {{(pid=61972) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 514.778458] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc71be0-e279-41d0-a060-fa725acb222f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.786836] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977dc507-2cd3-4f06-a393-2af52a4729a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.792902] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Got total of 0 instances {{(pid=61972) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 514.793077] env[61972]: WARNING nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] While synchronizing instance power states, found 1 instances in the database and 0 instances on the hypervisor. 
[ 514.793226] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Triggering sync for uuid e3a373f2-640f-479d-98f6-963a5fbc38ac {{(pid=61972) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 514.794705] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "e3a373f2-640f-479d-98f6-963a5fbc38ac" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.794867] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.795219] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Getting list of instances from cluster (obj){ [ 514.795219] env[61972]: value = "domain-c8" [ 514.795219] env[61972]: _type = "ClusterComputeResource" [ 514.795219] env[61972]: } {{(pid=61972) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 514.796325] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5733c20a-c88e-40d4-836b-6ca4013b4b08 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.806021] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Got total of 0 instances {{(pid=61972) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 514.811136] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cc8d49-a875-4f2c-9103-e44eaadbf829 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.846256] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebac2e1e-96a4-4fb8-aa20-ac430bef563e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.853982] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93ce3e0-6c0c-4a80-b862-4a51b5a0f943 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.868644] env[61972]: DEBUG nova.compute.provider_tree [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 515.114189] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Acquiring lock "4f70d05b-de38-41d5-b2ff-4856abd85ee4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.114577] env[61972]: DEBUG oslo_concurrency.lockutils [None 
req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Lock "4f70d05b-de38-41d5-b2ff-4856abd85ee4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.373938] env[61972]: DEBUG nova.scheduler.client.report [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 515.617800] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 515.882460] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.167s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 515.883984] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 516.151589] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.151898] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.153934] env[61972]: INFO nova.compute.claims [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 516.191942] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Acquiring lock "433465bd-370b-4af0-a491-e4321124deca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.192291] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Lock "433465bd-370b-4af0-a491-e4321124deca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.392502] env[61972]: DEBUG nova.compute.utils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 516.394389] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 516.394880] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 516.698016] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 516.832593] env[61972]: DEBUG nova.policy [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13a2aee7a1b34c4092c2a52cb7683528', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05f1f715efc54b2abc89921fd29b2ca0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 516.897556] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 517.217588] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.231213] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7fffe5-5f7a-4231-9de1-37b309add406 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.239704] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fb643b-f5c6-4629-89f6-5392193e1edb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.277153] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78245f02-613d-4823-8dc5-e9eb167d89fd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.284813] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f04d40-db01-4cb0-8680-b73bd0d83cae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.300788] env[61972]: DEBUG nova.compute.provider_tree [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 517.573426] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Successfully created port: 7e895ead-a61b-46dc-8368-b89c0bb75a66 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 517.804660] env[61972]: DEBUG nova.scheduler.client.report [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 517.913192] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 517.935318] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 517.935735] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 517.935822] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 517.936015] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 517.936666] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 517.936837] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 517.937065] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 517.937439] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 517.937864] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 517.938053] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 517.938232] env[61972]: DEBUG nova.virt.hardware [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 517.939129] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3455dc-7a03-431b-b561-ceee32d43950 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.955024] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71a140f-b7be-49a5-9f44-e63fa597d887 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.973554] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6ed12a-a162-4f23-8efd-ef77c66a5820 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.310242] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.158s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.310983] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 518.314172] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.096s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.315106] env[61972]: INFO nova.compute.claims [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 518.820504] env[61972]: DEBUG nova.compute.utils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 518.825430] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 518.825430] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 518.927769] env[61972]: DEBUG nova.policy [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3616a5759f84445e84217f2d4bf5912a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8dec145bfec94bcea43138b57c9e8663', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 519.067538] env[61972]: ERROR nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. 
[ 519.067538] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 519.067538] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 519.067538] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 519.067538] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 519.067538] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 519.067538] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 519.067538] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 519.067538] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 519.067538] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 519.067538] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 519.067538] env[61972]: ERROR nova.compute.manager raise self.value [ 519.067538] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 519.067538] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 519.067538] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 519.067538] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 519.068321] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 519.068321] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 519.068321] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. 
[ 519.068321] env[61972]: ERROR nova.compute.manager [ 519.068321] env[61972]: Traceback (most recent call last): [ 519.068321] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 519.068321] env[61972]: listener.cb(fileno) [ 519.068321] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 519.068321] env[61972]: result = function(*args, **kwargs) [ 519.068321] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 519.068321] env[61972]: return func(*args, **kwargs) [ 519.068321] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 519.068321] env[61972]: raise e [ 519.068321] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 519.068321] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 519.068321] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 519.068321] env[61972]: created_port_ids = self._update_ports_for_instance( [ 519.068321] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 519.068321] env[61972]: with excutils.save_and_reraise_exception(): [ 519.068321] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 519.068321] env[61972]: self.force_reraise() [ 519.068321] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 519.068321] env[61972]: raise self.value [ 519.068321] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 519.068321] env[61972]: updated_port = self._update_port( [ 519.068321] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 519.068321] env[61972]: _ensure_no_port_binding_failure(port) [ 519.068321] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 519.068321] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 519.069298] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. [ 519.069298] env[61972]: Removing descriptor: 15 [ 519.069492] env[61972]: ERROR nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. 
[ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Traceback (most recent call last): [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] yield resources [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self.driver.spawn(context, instance, image_meta, [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] vm_ref = self.build_virtual_machine(instance, [ 519.069492] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] vif_infos = vmwarevif.get_vif_info(self._session, [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] for vif in network_info: [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] return self._sync_wrapper(fn, *args, **kwargs) [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self.wait() [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self[:] = self._gt.wait() [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] return self._exit_event.wait() [ 519.069797] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 519.069797] env[61972]: ERROR 
nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] result = hub.switch() [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] return self.greenlet.switch() [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] result = function(*args, **kwargs) [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] return func(*args, **kwargs) [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] raise e [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] nwinfo = self.network_api.allocate_for_instance( [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] created_port_ids = self._update_ports_for_instance( [ 519.071115] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] with excutils.save_and_reraise_exception(): [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self.force_reraise() [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] raise self.value [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] updated_port = self._update_port( [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 519.071532] 
env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] _ensure_no_port_binding_failure(port) [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] raise exception.PortBindingFailed(port_id=port['id']) [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] nova.exception.PortBindingFailed: Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. [ 519.071532] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] [ 519.071844] env[61972]: INFO nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Terminating instance [ 519.291663] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Acquiring lock "96d406fc-7802-4d05-a9e7-f0fe5576aa74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.291663] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Lock "96d406fc-7802-4d05-a9e7-f0fe5576aa74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.296986] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Successfully created port: a27daaa7-c19c-47a8-b629-a9f9f57d9aed {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 519.331190] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 519.415020] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c607858-c620-4c1c-8093-197bad91d9e1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.421487] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbe41cd-a3ac-454c-89d3-e4f241bec71a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.463300] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48613a8-7c52-476d-87f8-cc8bc9f2e3b8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.470800] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492381b2-2c84-40ff-82bd-a57aee38f035 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.487024] env[61972]: DEBUG nova.compute.provider_tree [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 519.575745] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.575932] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquired lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.576123] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 519.769608] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Acquiring lock "0fae076c-ced2-4456-8223-2d71e78fabb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.769919] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Lock "0fae076c-ced2-4456-8223-2d71e78fabb4" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.794505] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 519.888557] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Acquiring lock "2d43aa8e-ea11-4209-b166-b87159a37e72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.888949] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Lock "2d43aa8e-ea11-4209-b166-b87159a37e72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.990796] env[61972]: DEBUG nova.scheduler.client.report [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 520.125356] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 520.230042] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.277317] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 520.324837] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.344470] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 520.384555] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 520.384784] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 520.384937] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 520.385139] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 520.385280] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 520.385426] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 520.385634] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 520.386226] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 520.386433] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 520.386597] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 520.386839] env[61972]: DEBUG nova.virt.hardware [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 520.387805] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b30ad8d-e4a3-4fab-a733-f821b60424b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.392608] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 520.398554] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0893684-bb7e-4927-bb9c-e38443e56737 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.497318] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.184s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 520.499773] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 520.503423] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.179s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.505116] env[61972]: INFO nova.compute.claims [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 520.683469] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "75e101c8-0ea7-40d1-a0ce-9a866b252772" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.683698] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "75e101c8-0ea7-40d1-a0ce-9a866b252772" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.733064] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Releasing lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 520.733492] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 520.734990] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 520.734990] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2edf7fba-36bf-41d9-950d-2dccec66cd76 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.745712] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0aafae-599d-4880-bfbb-e7a0c5f99dab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.768693] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e3a373f2-640f-479d-98f6-963a5fbc38ac could not be found. [ 520.768919] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 520.769348] env[61972]: INFO nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Took 0.04 seconds to destroy the instance on the hypervisor. [ 520.769641] env[61972]: DEBUG oslo.service.loopingcall [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 520.769842] env[61972]: DEBUG nova.compute.manager [-] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 520.769948] env[61972]: DEBUG nova.network.neutron [-] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 520.802837] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.809717] env[61972]: DEBUG nova.network.neutron [-] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 520.921272] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.943106] env[61972]: ERROR nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. [ 520.943106] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 520.943106] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.943106] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 520.943106] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 520.943106] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 520.943106] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 520.943106] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 520.943106] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.943106] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 520.943106] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.943106] env[61972]: ERROR nova.compute.manager raise self.value [ 520.943106] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 520.943106] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 520.943106] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.943106] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 520.943602] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.943602] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 520.943602] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. 
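Both PortBindingFailed errors are raised by _ensure_no_port_binding_failure(). In rough terms (a simplified sketch, not Nova's literal code), the check inspects the port record returned by Neutron and turns a failed binding into PortBindingFailed so the build aborts before a VM is spawned with an unusable VIF:

# Simplified sketch of the binding check named in the tracebacks above.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks an unbindable port with binding:vif_type='binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example: a port whose binding failed on the Neutron side.
# ensure_no_port_binding_failure({'id': 'a27daaa7-...',
#                                 'binding:vif_type': 'binding_failed'})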
[ 520.943602] env[61972]: ERROR nova.compute.manager [ 520.943602] env[61972]: Traceback (most recent call last): [ 520.943602] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 520.943602] env[61972]: listener.cb(fileno) [ 520.943602] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.943602] env[61972]: result = function(*args, **kwargs) [ 520.943602] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 520.943602] env[61972]: return func(*args, **kwargs) [ 520.943602] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 520.943602] env[61972]: raise e [ 520.943602] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.943602] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 520.943602] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 520.943602] env[61972]: created_port_ids = self._update_ports_for_instance( [ 520.943602] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 520.943602] env[61972]: with excutils.save_and_reraise_exception(): [ 520.943602] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.943602] env[61972]: self.force_reraise() [ 520.943602] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.943602] env[61972]: raise self.value [ 520.943602] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 520.943602] env[61972]: updated_port = self._update_port( [ 520.943602] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.943602] env[61972]: _ensure_no_port_binding_failure(port) [ 520.943602] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.943602] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 520.944304] env[61972]: nova.exception.PortBindingFailed: Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. [ 520.944304] env[61972]: Removing descriptor: 17 [ 520.944304] env[61972]: ERROR nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. 
[ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Traceback (most recent call last): [ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] yield resources [ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self.driver.spawn(context, instance, image_meta, [ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 520.944304] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] vm_ref = self.build_virtual_machine(instance, [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] vif_infos = vmwarevif.get_vif_info(self._session, [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] for vif in network_info: [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] return self._sync_wrapper(fn, *args, **kwargs) [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self.wait() [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self[:] = self._gt.wait() [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] return self._exit_event.wait() [ 520.944591] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 520.944915] env[61972]: ERROR 
nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] result = hub.switch() [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] return self.greenlet.switch() [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] result = function(*args, **kwargs) [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] return func(*args, **kwargs) [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] raise e [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] nwinfo = self.network_api.allocate_for_instance( [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 520.944915] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] created_port_ids = self._update_ports_for_instance( [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] with excutils.save_and_reraise_exception(): [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self.force_reraise() [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] raise self.value [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] updated_port = self._update_port( [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.945306] 
env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] _ensure_no_port_binding_failure(port) [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.945306] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] raise exception.PortBindingFailed(port_id=port['id']) [ 520.945613] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] nova.exception.PortBindingFailed: Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. [ 520.945613] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] [ 520.945613] env[61972]: INFO nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Terminating instance [ 521.005687] env[61972]: DEBUG nova.compute.utils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 521.007059] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 521.008037] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 521.066841] env[61972]: DEBUG nova.compute.manager [req-f8682534-2a15-4f27-a67a-8f502bfd3ac9 req-8cc35285-63e5-428f-9181-55ad25b9ac7a service nova] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Received event network-changed-7e895ead-a61b-46dc-8368-b89c0bb75a66 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 521.067060] env[61972]: DEBUG nova.compute.manager [req-f8682534-2a15-4f27-a67a-8f502bfd3ac9 req-8cc35285-63e5-428f-9181-55ad25b9ac7a service nova] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Refreshing instance network info cache due to event network-changed-7e895ead-a61b-46dc-8368-b89c0bb75a66. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 521.067356] env[61972]: DEBUG oslo_concurrency.lockutils [req-f8682534-2a15-4f27-a67a-8f502bfd3ac9 req-8cc35285-63e5-428f-9181-55ad25b9ac7a service nova] Acquiring lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.067463] env[61972]: DEBUG oslo_concurrency.lockutils [req-f8682534-2a15-4f27-a67a-8f502bfd3ac9 req-8cc35285-63e5-428f-9181-55ad25b9ac7a service nova] Acquired lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.067647] env[61972]: DEBUG nova.network.neutron [req-f8682534-2a15-4f27-a67a-8f502bfd3ac9 req-8cc35285-63e5-428f-9181-55ad25b9ac7a service nova] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Refreshing network info cache for port 7e895ead-a61b-46dc-8368-b89c0bb75a66 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 521.071692] env[61972]: DEBUG nova.policy [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6656efb10c444e21978b1553eb5f7784', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f70baceb5df4b248e09ecf1dcb3aead', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 521.186781] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 521.314294] env[61972]: DEBUG nova.network.neutron [-] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 521.399512] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Successfully created port: b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 521.450105] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Acquiring lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.450747] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Acquired lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.450986] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 521.511283] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 521.606551] env[61972]: DEBUG nova.network.neutron [req-f8682534-2a15-4f27-a67a-8f502bfd3ac9 req-8cc35285-63e5-428f-9181-55ad25b9ac7a service nova] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 521.660137] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2617c3d0-1750-447a-9e61-8f9e7a50367e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.668709] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0407809e-f2bc-45a6-b462-be0cfc2e72c0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.715594] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6188938-6682-44b1-b81d-d9171d449088 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.719046] env[61972]: DEBUG nova.network.neutron [req-f8682534-2a15-4f27-a67a-8f502bfd3ac9 req-8cc35285-63e5-428f-9181-55ad25b9ac7a service nova] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 521.733761] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3f1363-4038-4bde-b113-c4b9b3b7b5be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.751780] env[61972]: DEBUG nova.compute.provider_tree [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 521.754451] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.819369] env[61972]: INFO nova.compute.manager [-] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Took 1.05 seconds to deallocate network for instance. [ 521.822588] env[61972]: DEBUG nova.compute.claims [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 521.822682] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.994125] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 522.196636] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.227523] env[61972]: DEBUG oslo_concurrency.lockutils [req-f8682534-2a15-4f27-a67a-8f502bfd3ac9 req-8cc35285-63e5-428f-9181-55ad25b9ac7a service nova] Releasing lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 522.256369] env[61972]: DEBUG nova.scheduler.client.report [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 522.532908] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 522.570390] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 522.570390] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 522.570642] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 522.571161] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 522.571161] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 522.571447] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 522.571777] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 522.572793] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 
tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 522.572793] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 522.572793] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 522.572793] env[61972]: DEBUG nova.virt.hardware [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 522.574347] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ecde26-ff36-4781-ad01-e175b36901a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.588429] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b974060-7fa9-46c9-bd39-2b08c2339eef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.699359] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Releasing lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 522.699881] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 522.700255] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 522.700567] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16f99d4d-819a-48bb-9370-cb028e2e0a31 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.716274] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6da918-e56d-4a81-ab06-e9b00c018653 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.749511] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4f70d05b-de38-41d5-b2ff-4856abd85ee4 could not be found. [ 522.749768] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 522.750065] env[61972]: INFO nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 522.753203] env[61972]: DEBUG oslo.service.loopingcall [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 522.753203] env[61972]: DEBUG nova.compute.manager [-] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 522.753203] env[61972]: DEBUG nova.network.neutron [-] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 522.762765] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 522.762765] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 522.771298] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.965s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.774092] env[61972]: INFO nova.compute.claims [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 522.786496] env[61972]: DEBUG nova.network.neutron [-] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 523.292247] env[61972]: DEBUG nova.compute.utils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 523.295164] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 523.295793] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 523.309562] env[61972]: DEBUG nova.network.neutron [-] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.439797] env[61972]: DEBUG nova.policy [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6829bca73381493ebfb80c5254a8a2e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2990305d6f4049d2a525907d2a5a6620', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 523.761275] env[61972]: ERROR nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. 
[ 523.761275] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 523.761275] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.761275] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 523.761275] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.761275] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 523.761275] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.761275] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 523.761275] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.761275] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 523.761275] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.761275] env[61972]: ERROR nova.compute.manager raise self.value [ 523.761275] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.761275] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 523.761275] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.761275] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 523.761816] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.761816] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 523.761816] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. 
[ 523.761816] env[61972]: ERROR nova.compute.manager [ 523.761816] env[61972]: Traceback (most recent call last): [ 523.761816] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 523.761816] env[61972]: listener.cb(fileno) [ 523.761816] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.761816] env[61972]: result = function(*args, **kwargs) [ 523.761816] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 523.761816] env[61972]: return func(*args, **kwargs) [ 523.761816] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 523.761816] env[61972]: raise e [ 523.761816] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.761816] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 523.761816] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.761816] env[61972]: created_port_ids = self._update_ports_for_instance( [ 523.761816] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.761816] env[61972]: with excutils.save_and_reraise_exception(): [ 523.761816] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.761816] env[61972]: self.force_reraise() [ 523.761816] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.761816] env[61972]: raise self.value [ 523.761816] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.761816] env[61972]: updated_port = self._update_port( [ 523.761816] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.761816] env[61972]: _ensure_no_port_binding_failure(port) [ 523.761816] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.761816] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 523.762544] env[61972]: nova.exception.PortBindingFailed: Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. [ 523.762544] env[61972]: Removing descriptor: 17 [ 523.762544] env[61972]: ERROR nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. 
[ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] Traceback (most recent call last): [ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] yield resources [ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self.driver.spawn(context, instance, image_meta, [ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 523.762544] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] vm_ref = self.build_virtual_machine(instance, [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] vif_infos = vmwarevif.get_vif_info(self._session, [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] for vif in network_info: [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] return self._sync_wrapper(fn, *args, **kwargs) [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self.wait() [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self[:] = self._gt.wait() [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] return self._exit_event.wait() [ 523.762838] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 523.763157] env[61972]: ERROR 
nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] result = hub.switch() [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] return self.greenlet.switch() [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] result = function(*args, **kwargs) [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] return func(*args, **kwargs) [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] raise e [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] nwinfo = self.network_api.allocate_for_instance( [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.763157] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] created_port_ids = self._update_ports_for_instance( [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] with excutils.save_and_reraise_exception(): [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self.force_reraise() [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] raise self.value [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] updated_port = self._update_port( [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.763468] 
env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] _ensure_no_port_binding_failure(port) [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.763468] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] raise exception.PortBindingFailed(port_id=port['id']) [ 523.763782] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] nova.exception.PortBindingFailed: Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. [ 523.763782] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] [ 523.763782] env[61972]: INFO nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Terminating instance [ 523.795594] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 523.815131] env[61972]: INFO nova.compute.manager [-] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Took 1.06 seconds to deallocate network for instance. [ 523.816240] env[61972]: DEBUG nova.compute.claims [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 523.816587] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.955847] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779e79b4-42fa-4fbd-a633-c776aaa69f5f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.965480] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c18708-c356-472b-9d25-85534ef022cb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.000912] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1462fe67-ee7d-445c-b457-28cf66f8c9c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.009814] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9bc618-7a0c-4a55-a632-9bfc57466f72 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.025568] env[61972]: DEBUG 
nova.compute.provider_tree [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 524.151327] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Successfully created port: 447338cd-16a6-49c9-8cde-4aaf069132ff {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 524.271198] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Acquiring lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.271458] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Acquired lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.271657] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 524.666901] env[61972]: DEBUG nova.scheduler.client.report [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 524.804032] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 524.807012] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 524.831909] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 524.832016] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 524.832191] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 524.832527] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 524.832660] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 524.833802] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 524.833802] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 524.833802] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 524.833802] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 524.833802] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 524.834094] env[61972]: DEBUG nova.virt.hardware [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 524.835417] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87aeaa77-e400-4478-beac-492fa47aa524 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.846950] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79acbc4c-0278-4ae0-839a-b5a538a1d54c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.076765] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.155135] env[61972]: DEBUG nova.compute.manager [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Received event network-vif-deleted-7e895ead-a61b-46dc-8368-b89c0bb75a66 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 525.155340] env[61972]: DEBUG nova.compute.manager [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Received event network-changed-a27daaa7-c19c-47a8-b629-a9f9f57d9aed {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 525.155507] env[61972]: DEBUG nova.compute.manager [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Refreshing instance network info cache due to event network-changed-a27daaa7-c19c-47a8-b629-a9f9f57d9aed. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 525.155712] env[61972]: DEBUG oslo_concurrency.lockutils [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] Acquiring lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.155842] env[61972]: DEBUG oslo_concurrency.lockutils [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] Acquired lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.155993] env[61972]: DEBUG nova.network.neutron [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Refreshing network info cache for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 525.173031] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.173564] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 525.177272] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.255s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.178854] env[61972]: INFO nova.compute.claims [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 525.580992] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Releasing lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.581870] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 525.581870] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 525.582049] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0c3073c-3ce1-4a53-92c8-e97087d66eef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.596603] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca82b9d-ea33-4527-b312-191ef24ef5d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.620357] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 433465bd-370b-4af0-a491-e4321124deca could not be found. [ 525.620596] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 525.620878] env[61972]: INFO nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Took 0.04 seconds to destroy the instance on the hypervisor. [ 525.621070] env[61972]: DEBUG oslo.service.loopingcall [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 525.621331] env[61972]: DEBUG nova.compute.manager [-] [instance: 433465bd-370b-4af0-a491-e4321124deca] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 525.621450] env[61972]: DEBUG nova.network.neutron [-] [instance: 433465bd-370b-4af0-a491-e4321124deca] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 525.689489] env[61972]: DEBUG nova.compute.utils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 525.697367] env[61972]: DEBUG nova.network.neutron [-] [instance: 433465bd-370b-4af0-a491-e4321124deca] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.698426] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 525.698592] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 525.722871] env[61972]: DEBUG nova.network.neutron [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.952949] env[61972]: DEBUG nova.policy [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3fa61707574456ebdd173cc021d21c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8936cc30efe4c338564fef1658f9396', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 526.129958] env[61972]: DEBUG nova.network.neutron [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.199497] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 526.213422] env[61972]: DEBUG nova.network.neutron [-] [instance: 433465bd-370b-4af0-a491-e4321124deca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.368900] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0807d51a-5eb3-4338-b97c-db163f61a9bb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.378098] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00006b72-53f7-44e0-b6a2-55817f919088 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.415308] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cda3a5-9e6a-4f38-bd04-8f8c7708d886 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.423251] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6ab7fc-7598-4331-86eb-bb6b3d8fd3ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.439469] env[61972]: DEBUG nova.compute.provider_tree [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 526.606011] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquiring lock "72d434a7-ea70-4594-971f-7eec8ebea153" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.606240] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "72d434a7-ea70-4594-971f-7eec8ebea153" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.633354] env[61972]: DEBUG oslo_concurrency.lockutils [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] Releasing lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.633628] env[61972]: DEBUG nova.compute.manager [req-8bb3f1bc-e87c-40ae-a533-29145cf902e8 req-380847a3-28b8-4dd8-998b-b8c8aaf45bbb service nova] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Received event network-vif-deleted-a27daaa7-c19c-47a8-b629-a9f9f57d9aed {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 526.715653] env[61972]: INFO nova.compute.manager [-] [instance: 
433465bd-370b-4af0-a491-e4321124deca] Took 1.09 seconds to deallocate network for instance. [ 526.719259] env[61972]: DEBUG nova.compute.claims [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 526.720175] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.945509] env[61972]: DEBUG nova.scheduler.client.report [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 527.109154] env[61972]: DEBUG nova.compute.manager [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 527.210324] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 527.251771] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 527.252078] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 527.252242] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 527.252421] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 527.252557] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 527.252753] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 527.253038] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 527.253038] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 527.253212] 
env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 527.253365] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 527.253528] env[61972]: DEBUG nova.virt.hardware [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 527.254433] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91535d95-bdda-4379-adde-6eac132d99d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.264202] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500e2122-2856-4ae9-a3c5-7718ed747196 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.423209] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Successfully created port: 7a5e8383-5128-474c-b293-634b1ebdb94f {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 527.449554] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.450065] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 527.460372] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.703s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.462261] env[61972]: INFO nova.compute.claims [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 527.633743] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 527.634193] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 527.634487] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 527.634870] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Rebuilding the list of instances to heal {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 527.650437] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.971462] env[61972]: DEBUG nova.compute.utils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 527.980852] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 527.980852] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 528.143188] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 528.143188] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 528.143188] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 433465bd-370b-4af0-a491-e4321124deca] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 528.143188] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 528.143188] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 528.143188] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 528.143882] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 528.143882] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Didn't find any instances for network info cache update. 
{{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 528.143882] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.147673] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.147673] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.147673] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.147673] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.147673] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.147673] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 528.148601] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 528.279731] env[61972]: DEBUG nova.policy [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a9aa06fcd3c498399a3252cf9597832', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '434f547698164b79a5f84f640c11494a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 528.337231] env[61972]: ERROR nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. [ 528.337231] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 528.337231] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 528.337231] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 528.337231] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.337231] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 528.337231] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.337231] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 528.337231] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.337231] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 528.337231] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.337231] env[61972]: ERROR nova.compute.manager raise self.value [ 528.337231] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.337231] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 528.337231] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.337231] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 528.337780] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.337780] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 528.337780] env[61972]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. [ 528.337780] env[61972]: ERROR nova.compute.manager [ 528.337780] env[61972]: Traceback (most recent call last): [ 528.337780] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 528.337780] env[61972]: listener.cb(fileno) [ 528.337780] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.337780] env[61972]: result = function(*args, **kwargs) [ 528.337780] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 528.337780] env[61972]: return func(*args, **kwargs) [ 528.337780] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 528.337780] env[61972]: raise e [ 528.337780] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 528.337780] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 528.337780] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.337780] env[61972]: created_port_ids = self._update_ports_for_instance( [ 528.337780] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.337780] env[61972]: with excutils.save_and_reraise_exception(): [ 528.337780] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.337780] env[61972]: self.force_reraise() [ 528.337780] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.337780] env[61972]: raise self.value [ 528.337780] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.337780] env[61972]: updated_port = self._update_port( [ 528.337780] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.337780] env[61972]: _ensure_no_port_binding_failure(port) [ 528.337780] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.337780] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 528.338552] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. [ 528.338552] env[61972]: Removing descriptor: 15 [ 528.338552] env[61972]: ERROR nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. 
[ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Traceback (most recent call last): [ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] yield resources [ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self.driver.spawn(context, instance, image_meta, [ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self._vmops.spawn(context, instance, image_meta, injected_files, [ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 528.338552] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] vm_ref = self.build_virtual_machine(instance, [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] vif_infos = vmwarevif.get_vif_info(self._session, [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] for vif in network_info: [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] return self._sync_wrapper(fn, *args, **kwargs) [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self.wait() [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self[:] = self._gt.wait() [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] return self._exit_event.wait() [ 528.338962] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 528.339283] env[61972]: ERROR 
nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] result = hub.switch() [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] return self.greenlet.switch() [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] result = function(*args, **kwargs) [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] return func(*args, **kwargs) [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] raise e [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] nwinfo = self.network_api.allocate_for_instance( [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.339283] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] created_port_ids = self._update_ports_for_instance( [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] with excutils.save_and_reraise_exception(): [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self.force_reraise() [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] raise self.value [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] updated_port = self._update_port( [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.340305] 
env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] _ensure_no_port_binding_failure(port) [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.340305] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] raise exception.PortBindingFailed(port_id=port['id']) [ 528.340673] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] nova.exception.PortBindingFailed: Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. [ 528.340673] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] [ 528.340673] env[61972]: INFO nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Terminating instance [ 528.481882] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 528.655904] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.674798] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a752c5-cb32-4435-86e0-c3dc0c0b82ae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.687025] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb67acc-fce7-40c2-b9fa-609d044c1106 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.724678] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ad5dd8-f874-4c29-be8d-353006c40d84 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.733448] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec7108a-289e-4a75-9ee6-4ca82d5f8d30 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.753636] env[61972]: DEBUG nova.compute.provider_tree [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 528.848546] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Acquiring lock 
"refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.849154] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Acquired lock "refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.849717] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 529.258555] env[61972]: DEBUG nova.scheduler.client.report [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 529.382661] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 529.397503] env[61972]: DEBUG nova.compute.manager [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] [instance: 433465bd-370b-4af0-a491-e4321124deca] Received event network-changed-b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 529.397695] env[61972]: DEBUG nova.compute.manager [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] [instance: 433465bd-370b-4af0-a491-e4321124deca] Refreshing instance network info cache due to event network-changed-b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 529.399745] env[61972]: DEBUG oslo_concurrency.lockutils [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] Acquiring lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.401023] env[61972]: DEBUG oslo_concurrency.lockutils [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] Acquired lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.401023] env[61972]: DEBUG nova.network.neutron [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] [instance: 433465bd-370b-4af0-a491-e4321124deca] Refreshing network info cache for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 529.499961] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 529.514492] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.543097] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 529.543424] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 529.543606] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Image limits 0:0:0 
{{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 529.543810] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 529.543966] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 529.544130] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 529.544496] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 529.544593] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 529.544763] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 529.544922] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 529.545165] env[61972]: DEBUG nova.virt.hardware [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 529.546355] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e105e6e-d4b9-4da4-943c-ea0e19135796 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.549891] env[61972]: ERROR nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please 
check neutron logs for more information. [ 529.549891] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 529.549891] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 529.549891] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 529.549891] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.549891] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 529.549891] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.549891] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 529.549891] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.549891] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 529.549891] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.549891] env[61972]: ERROR nova.compute.manager raise self.value [ 529.549891] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.549891] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 529.549891] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.549891] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 529.550361] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.550361] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 529.550361] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please check neutron logs for more information. 
[ 529.550361] env[61972]: ERROR nova.compute.manager [ 529.550361] env[61972]: Traceback (most recent call last): [ 529.550361] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 529.550361] env[61972]: listener.cb(fileno) [ 529.550361] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.550361] env[61972]: result = function(*args, **kwargs) [ 529.550361] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 529.550361] env[61972]: return func(*args, **kwargs) [ 529.550361] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 529.550361] env[61972]: raise e [ 529.550361] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 529.550361] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 529.550361] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.550361] env[61972]: created_port_ids = self._update_ports_for_instance( [ 529.550361] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.550361] env[61972]: with excutils.save_and_reraise_exception(): [ 529.550361] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.550361] env[61972]: self.force_reraise() [ 529.550361] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.550361] env[61972]: raise self.value [ 529.550361] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.550361] env[61972]: updated_port = self._update_port( [ 529.550361] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.550361] env[61972]: _ensure_no_port_binding_failure(port) [ 529.550361] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.550361] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 529.551978] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please check neutron logs for more information. [ 529.551978] env[61972]: Removing descriptor: 17 [ 529.551978] env[61972]: ERROR nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please check neutron logs for more information. 
[ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Traceback (most recent call last): [ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] yield resources [ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self.driver.spawn(context, instance, image_meta, [ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.551978] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] vm_ref = self.build_virtual_machine(instance, [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] for vif in network_info: [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] return self._sync_wrapper(fn, *args, **kwargs) [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self.wait() [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self[:] = self._gt.wait() [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] return self._exit_event.wait() [ 529.553260] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.553660] env[61972]: ERROR 
nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] result = hub.switch() [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] return self.greenlet.switch() [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] result = function(*args, **kwargs) [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] return func(*args, **kwargs) [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] raise e [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] nwinfo = self.network_api.allocate_for_instance( [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.553660] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] created_port_ids = self._update_ports_for_instance( [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] with excutils.save_and_reraise_exception(): [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self.force_reraise() [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] raise self.value [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] updated_port = self._update_port( [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.554104] 
env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] _ensure_no_port_binding_failure(port) [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.554104] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] raise exception.PortBindingFailed(port_id=port['id']) [ 529.554431] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] nova.exception.PortBindingFailed: Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please check neutron logs for more information. [ 529.554431] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] [ 529.554431] env[61972]: INFO nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Terminating instance [ 529.559116] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e2f655-9159-4dc4-a479-12f3ba1cf850 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.599582] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Successfully created port: 14a2dc7b-cf4e-48dc-85af-bd4f0a160952 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 529.763317] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.763854] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 529.767592] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.945s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.925207] env[61972]: DEBUG nova.network.neutron [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] [instance: 433465bd-370b-4af0-a491-e4321124deca] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.017954] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Releasing lock "refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.018394] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 530.018664] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 530.018956] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9080742-d7e6-414c-8ad8-ca23cf7d416c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.029065] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bde1ef-4766-447f-8eb9-d0f22c5f2bb4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.057170] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Acquiring lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.057374] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Acquired lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.057834] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 530.060209] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 96d406fc-7802-4d05-a9e7-f0fe5576aa74 could not be found. 
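The PortBindingFailed tracebacks above all terminate in the same guard: `_update_port` calls `_ensure_no_port_binding_failure`, which raises once Neutron reports that it could not bind the port. Below is a minimal, self-contained sketch of that check for illustration only; it is not the actual `nova/network/neutron.py` code, and the exception class is simplified. The `binding:vif_type == 'binding_failed'` marker is the value Neutron sets on a port no mechanism driver could bind.

```python
# Illustrative sketch of the guard seen in the tracebacks above (simplified,
# not the real Nova implementation).

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reported that it could not bind the port."""
    # Neutron marks an unbindable port with binding:vif_type = 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: a port whose binding failed, as in the log entries above.
port = {'id': '447338cd-16a6-49c9-8cde-4aaf069132ff',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same wording as the error text logged above
```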
[ 530.060209] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 530.060209] env[61972]: INFO nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Took 0.04 seconds to destroy the instance on the hypervisor. [ 530.060371] env[61972]: DEBUG oslo.service.loopingcall [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 530.061732] env[61972]: DEBUG nova.network.neutron [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] [instance: 433465bd-370b-4af0-a491-e4321124deca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.063294] env[61972]: DEBUG nova.compute.manager [-] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 530.063294] env[61972]: DEBUG nova.network.neutron [-] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 530.109181] env[61972]: DEBUG nova.network.neutron [-] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.269924] env[61972]: DEBUG nova.compute.utils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 530.271378] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 530.271487] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 530.381022] env[61972]: DEBUG nova.policy [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11e1217449554a459d4f5cb72bfc578f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b80714a72c14aef842c7b02001edd92', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 530.440712] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7303585e-9da9-42c0-9f85-a952df61851e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.450659] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82efc825-64b2-4e59-a451-3978a40b6c60 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.481347] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c97e4b-9b7e-46ed-857a-c64a466b4e76 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.488619] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f87d4e-6c5e-4e2b-8796-6d5b53e71610 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.502178] env[61972]: DEBUG nova.compute.provider_tree [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.569484] env[61972]: DEBUG oslo_concurrency.lockutils [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] Releasing lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.569701] env[61972]: DEBUG nova.compute.manager [req-293aeccf-eaf2-447c-a23e-68da9da13e84 req-56596d64-88a7-4013-986d-4eb1fe65520e service nova] [instance: 433465bd-370b-4af0-a491-e4321124deca] Received event network-vif-deleted-b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 530.590736] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 
tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.613434] env[61972]: DEBUG nova.network.neutron [-] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.778267] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 530.844869] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.007613] env[61972]: DEBUG nova.scheduler.client.report [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 531.116538] env[61972]: INFO nova.compute.manager [-] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Took 1.05 seconds to deallocate network for instance. 
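The build failures recorded below for ports 7e895ead-a61b-46dc-8368-b89c0bb75a66, 1b583a76-0230-4efa-b353-d1b1706fec01 and 14a2dc7b-cf4e-48dc-85af-bd4f0a160952 all abort with nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure(port) (nova/network/neutron.py:294 in these tracebacks). A minimal, self-contained sketch of that check follows; the class and function here are simplified stand-ins for the real nova.exception / nova.network.neutron code, and the sample port dict simply reuses a port ID from this log:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind (e.g. no mechanism driver or
        # L2 agent on the target host) with binding:vif_type = 'binding_failed';
        # Nova converts that into PortBindingFailed and the instance build is
        # aborted or re-scheduled, as seen in the entries below.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '1b583a76-0230-4efa-b353-d1b1706fec01',   # port from this log
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)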
[ 531.120028] env[61972]: DEBUG nova.compute.claims [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 531.120028] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.188110] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Successfully created port: 1b583a76-0230-4efa-b353-d1b1706fec01 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 531.347547] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Releasing lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.347966] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 531.348174] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 531.348455] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b35b30f-4563-495f-a273-812b260874a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.358210] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77536254-4221-4ddf-8b61-155224f2ed27 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.382055] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0fae076c-ced2-4456-8223-2d71e78fabb4 could not be found. 
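The "Acquiring lock" / "acquired ... waited" / "released ... held" entries above and below (for example the "compute_resources" lock taken around abort_instance_claim) are emitted by oslo.concurrency's named-lock wrapper, the lockutils.py inner frames cited in those lines. A minimal sketch of a caller that would produce such entries, with the function name as an illustrative stand-in rather than the actual resource-tracker method:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs only while the named lock is held; the wrapper logs how long the
        # caller waited to acquire the lock and how long it held it, which is
        # what the waited/held timings in this log measure.
        pass

    abort_instance_claim()  # serialized against any other 'compute_resources' holder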
[ 531.382159] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 531.382285] env[61972]: INFO nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 531.382547] env[61972]: DEBUG oslo.service.loopingcall [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 531.382779] env[61972]: DEBUG nova.compute.manager [-] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 531.383189] env[61972]: DEBUG nova.network.neutron [-] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 531.415893] env[61972]: DEBUG nova.network.neutron [-] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 531.513946] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.746s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.514702] env[61972]: ERROR nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. 
[ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Traceback (most recent call last): [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self.driver.spawn(context, instance, image_meta, [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] vm_ref = self.build_virtual_machine(instance, [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] vif_infos = vmwarevif.get_vif_info(self._session, [ 531.514702] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] for vif in network_info: [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] return self._sync_wrapper(fn, *args, **kwargs) [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self.wait() [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self[:] = self._gt.wait() [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] return self._exit_event.wait() [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] result = hub.switch() [ 531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
531.515028] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] return self.greenlet.switch() [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] result = function(*args, **kwargs) [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] return func(*args, **kwargs) [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] raise e [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] nwinfo = self.network_api.allocate_for_instance( [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] created_port_ids = self._update_ports_for_instance( [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] with excutils.save_and_reraise_exception(): [ 531.515385] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] self.force_reraise() [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] raise self.value [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] updated_port = self._update_port( [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] _ensure_no_port_binding_failure(port) [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] raise exception.PortBindingFailed(port_id=port['id']) [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] nova.exception.PortBindingFailed: Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. [ 531.515736] env[61972]: ERROR nova.compute.manager [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] [ 531.516031] env[61972]: DEBUG nova.compute.utils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 531.516518] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.700s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.527037] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Build of instance e3a373f2-640f-479d-98f6-963a5fbc38ac was re-scheduled: Binding failed for port 7e895ead-a61b-46dc-8368-b89c0bb75a66, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 531.527536] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 531.527641] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.527790] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquired lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.527946] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 531.643421] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Acquiring lock "96e5b238-aab4-4f75-abe8-f5a14b015099" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.643659] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Lock "96e5b238-aab4-4f75-abe8-f5a14b015099" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.793140] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 531.832761] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 531.832761] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 531.832761] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 531.833104] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 531.833104] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 531.833104] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 531.833104] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 531.833104] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 531.833978] 
env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 531.838529] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 531.838529] env[61972]: DEBUG nova.virt.hardware [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 531.838529] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec304ae-1a3c-4e50-b401-0593fd307e3a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.847218] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd9ba96-e2a0-4402-921c-601134c4d873 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.922404] env[61972]: DEBUG nova.network.neutron [-] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.070304] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.150098] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 532.195768] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.259204] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b90f537-9465-4ef5-904a-85c837dcc8fc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.271110] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0aeefda-a2c2-42cb-bff6-71caf3c2d771 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.305402] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c711eb7-5bf3-42ec-a376-8c27aa51f887 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.317429] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1ca6ea-3ddb-481c-8e4b-18ea719f9ca5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.333199] env[61972]: DEBUG nova.compute.provider_tree [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.400935] env[61972]: ERROR nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. 
[ 532.400935] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 532.400935] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 532.400935] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 532.400935] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.400935] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 532.400935] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.400935] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 532.400935] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.400935] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 532.400935] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.400935] env[61972]: ERROR nova.compute.manager raise self.value [ 532.400935] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.400935] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 532.400935] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.400935] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 532.401931] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.401931] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 532.401931] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. 
[ 532.401931] env[61972]: ERROR nova.compute.manager [ 532.401931] env[61972]: Traceback (most recent call last): [ 532.401931] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 532.401931] env[61972]: listener.cb(fileno) [ 532.401931] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.401931] env[61972]: result = function(*args, **kwargs) [ 532.401931] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 532.401931] env[61972]: return func(*args, **kwargs) [ 532.401931] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 532.401931] env[61972]: raise e [ 532.401931] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 532.401931] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 532.401931] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.401931] env[61972]: created_port_ids = self._update_ports_for_instance( [ 532.401931] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.401931] env[61972]: with excutils.save_and_reraise_exception(): [ 532.401931] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.401931] env[61972]: self.force_reraise() [ 532.401931] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.401931] env[61972]: raise self.value [ 532.401931] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.401931] env[61972]: updated_port = self._update_port( [ 532.401931] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.401931] env[61972]: _ensure_no_port_binding_failure(port) [ 532.401931] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.401931] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 532.402618] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. [ 532.402618] env[61972]: Removing descriptor: 17 [ 532.402618] env[61972]: ERROR nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. 
[ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Traceback (most recent call last): [ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] yield resources [ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self.driver.spawn(context, instance, image_meta, [ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self._vmops.spawn(context, instance, image_meta, injected_files, [ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 532.402618] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] vm_ref = self.build_virtual_machine(instance, [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] vif_infos = vmwarevif.get_vif_info(self._session, [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] for vif in network_info: [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] return self._sync_wrapper(fn, *args, **kwargs) [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self.wait() [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self[:] = self._gt.wait() [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] return self._exit_event.wait() [ 532.402912] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 532.403232] env[61972]: ERROR 
nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] result = hub.switch() [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] return self.greenlet.switch() [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] result = function(*args, **kwargs) [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] return func(*args, **kwargs) [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] raise e [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] nwinfo = self.network_api.allocate_for_instance( [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.403232] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] created_port_ids = self._update_ports_for_instance( [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] with excutils.save_and_reraise_exception(): [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self.force_reraise() [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] raise self.value [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] updated_port = self._update_port( [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.403528] 
env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] _ensure_no_port_binding_failure(port) [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.403528] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] raise exception.PortBindingFailed(port_id=port['id']) [ 532.403841] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] nova.exception.PortBindingFailed: Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. [ 532.403841] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] [ 532.403841] env[61972]: INFO nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Terminating instance [ 532.424306] env[61972]: INFO nova.compute.manager [-] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Took 1.04 seconds to deallocate network for instance. [ 532.426799] env[61972]: DEBUG nova.compute.claims [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 532.426987] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.511871] env[61972]: ERROR nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. 
[ 532.511871] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 532.511871] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 532.511871] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 532.511871] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.511871] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 532.511871] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.511871] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 532.511871] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.511871] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 532.511871] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.511871] env[61972]: ERROR nova.compute.manager raise self.value [ 532.511871] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.511871] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 532.511871] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.511871] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 532.512379] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.512379] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 532.512379] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. 
[ 532.512379] env[61972]: ERROR nova.compute.manager [ 532.512379] env[61972]: Traceback (most recent call last): [ 532.512379] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 532.512379] env[61972]: listener.cb(fileno) [ 532.512379] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.512379] env[61972]: result = function(*args, **kwargs) [ 532.512379] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 532.512379] env[61972]: return func(*args, **kwargs) [ 532.512379] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 532.512379] env[61972]: raise e [ 532.512379] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 532.512379] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 532.512379] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.512379] env[61972]: created_port_ids = self._update_ports_for_instance( [ 532.512379] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.512379] env[61972]: with excutils.save_and_reraise_exception(): [ 532.512379] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.512379] env[61972]: self.force_reraise() [ 532.512379] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.512379] env[61972]: raise self.value [ 532.512379] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.512379] env[61972]: updated_port = self._update_port( [ 532.512379] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.512379] env[61972]: _ensure_no_port_binding_failure(port) [ 532.512379] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.512379] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 532.513172] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. [ 532.513172] env[61972]: Removing descriptor: 19 [ 532.513172] env[61972]: ERROR nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. 
[ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Traceback (most recent call last): [ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] yield resources [ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self.driver.spawn(context, instance, image_meta, [ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 532.513172] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] vm_ref = self.build_virtual_machine(instance, [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] vif_infos = vmwarevif.get_vif_info(self._session, [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] for vif in network_info: [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] return self._sync_wrapper(fn, *args, **kwargs) [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self.wait() [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self[:] = self._gt.wait() [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] return self._exit_event.wait() [ 532.513475] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 532.515112] env[61972]: ERROR 
nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] result = hub.switch() [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] return self.greenlet.switch() [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] result = function(*args, **kwargs) [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] return func(*args, **kwargs) [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] raise e [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] nwinfo = self.network_api.allocate_for_instance( [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.515112] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] created_port_ids = self._update_ports_for_instance( [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] with excutils.save_and_reraise_exception(): [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self.force_reraise() [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] raise self.value [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] updated_port = self._update_port( [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.516407] 
env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] _ensure_no_port_binding_failure(port) [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.516407] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] raise exception.PortBindingFailed(port_id=port['id']) [ 532.516771] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] nova.exception.PortBindingFailed: Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. [ 532.516771] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] [ 532.516771] env[61972]: INFO nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Terminating instance [ 532.673654] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.697581] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Releasing lock "refresh_cache-e3a373f2-640f-479d-98f6-963a5fbc38ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.697848] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 532.698019] env[61972]: DEBUG nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 532.698186] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 532.718110] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.795723] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "d32a7937-792a-4959-bded-819463472399" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.795833] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "d32a7937-792a-4959-bded-819463472399" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.836966] env[61972]: DEBUG nova.scheduler.client.report [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 532.908304] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.908485] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquired lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.908673] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 533.017681] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Acquiring lock "refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.017935] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Acquired lock 
"refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.018137] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 533.220420] env[61972]: DEBUG nova.network.neutron [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.298133] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 533.344181] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.826s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.344181] env[61972]: ERROR nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. 
[ 533.344181] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Traceback (most recent call last): [ 533.344181] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 533.344181] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self.driver.spawn(context, instance, image_meta, [ 533.344181] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 533.344181] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.344181] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.344181] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] vm_ref = self.build_virtual_machine(instance, [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] for vif in network_info: [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] return self._sync_wrapper(fn, *args, **kwargs) [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self.wait() [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self[:] = self._gt.wait() [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] return self._exit_event.wait() [ 533.344457] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] result = hub.switch() [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] return self.greenlet.switch() [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] result = function(*args, **kwargs) [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] return func(*args, **kwargs) [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] raise e [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] nwinfo = self.network_api.allocate_for_instance( [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 533.344777] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] created_port_ids = self._update_ports_for_instance( [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] with excutils.save_and_reraise_exception(): [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] self.force_reraise() [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] raise self.value [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] updated_port = self._update_port( [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] _ensure_no_port_binding_failure(port) [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 533.345112] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] raise exception.PortBindingFailed(port_id=port['id']) [ 533.345523] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] nova.exception.PortBindingFailed: Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. [ 533.345523] env[61972]: ERROR nova.compute.manager [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] [ 533.347767] env[61972]: DEBUG nova.compute.utils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 533.349393] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.630s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.354665] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Build of instance 4f70d05b-de38-41d5-b2ff-4856abd85ee4 was re-scheduled: Binding failed for port a27daaa7-c19c-47a8-b629-a9f9f57d9aed, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 533.355099] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 533.355338] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Acquiring lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.355488] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Acquired lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.355616] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 533.435300] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.549173] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.612547] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.726018] env[61972]: INFO nova.compute.manager [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] Took 1.03 seconds to deallocate network for instance. 
[ 533.730284] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.813138] env[61972]: DEBUG nova.compute.manager [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Received event network-changed-447338cd-16a6-49c9-8cde-4aaf069132ff {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 533.813370] env[61972]: DEBUG nova.compute.manager [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Refreshing instance network info cache due to event network-changed-447338cd-16a6-49c9-8cde-4aaf069132ff. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 533.813691] env[61972]: DEBUG oslo_concurrency.lockutils [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] Acquiring lock "refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.814104] env[61972]: DEBUG oslo_concurrency.lockutils [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] Acquired lock "refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.814104] env[61972]: DEBUG nova.network.neutron [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Refreshing network info cache for port 447338cd-16a6-49c9-8cde-4aaf069132ff {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 533.836104] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.890039] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "fb28710d-cd15-41d4-b7aa-8389093ea9a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.890039] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "fb28710d-cd15-41d4-b7aa-8389093ea9a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.892188] env[61972]: DEBUG 
nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.906463] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Acquiring lock "7a7c98db-6ed4-4908-adc8-53347d693dca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.907083] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Lock "7a7c98db-6ed4-4908-adc8-53347d693dca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.037625] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.044573] env[61972]: DEBUG nova.compute.manager [req-02cd1ec6-59c2-4e46-8a28-574fe3f95ed2 req-37d776e0-09f8-460d-926f-6a1c09097544 service nova] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Received event network-changed-14a2dc7b-cf4e-48dc-85af-bd4f0a160952 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 534.044846] env[61972]: DEBUG nova.compute.manager [req-02cd1ec6-59c2-4e46-8a28-574fe3f95ed2 req-37d776e0-09f8-460d-926f-6a1c09097544 service nova] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Refreshing instance network info cache due to event network-changed-14a2dc7b-cf4e-48dc-85af-bd4f0a160952. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 534.045087] env[61972]: DEBUG oslo_concurrency.lockutils [req-02cd1ec6-59c2-4e46-8a28-574fe3f95ed2 req-37d776e0-09f8-460d-926f-6a1c09097544 service nova] Acquiring lock "refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.085534] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb9b2a7-3326-4279-83d8-e71d88b5ee67 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.093688] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279a57cb-6b96-4f61-a5d3-0cf7ddc448ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.132809] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Releasing lock "refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.133252] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 534.133650] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 534.134754] env[61972]: DEBUG oslo_concurrency.lockutils [req-02cd1ec6-59c2-4e46-8a28-574fe3f95ed2 req-37d776e0-09f8-460d-926f-6a1c09097544 service nova] Acquired lock "refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.135042] env[61972]: DEBUG nova.network.neutron [req-02cd1ec6-59c2-4e46-8a28-574fe3f95ed2 req-37d776e0-09f8-460d-926f-6a1c09097544 service nova] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Refreshing network info cache for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 534.135912] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52f77b2c-1c53-4718-8fe0-703f19291f42 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.138288] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de505f5-249a-4530-b541-f5c8d7327efc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.151921] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd271d5-be8c-421c-aad4-9a89930ef1dc {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.156959] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0459d6-8cfb-4030-8a85-5702fc098f1e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.179736] env[61972]: DEBUG nova.compute.provider_tree [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.185074] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2d43aa8e-ea11-4209-b166-b87159a37e72 could not be found. [ 534.185306] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 534.185482] env[61972]: INFO nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Took 0.05 seconds to destroy the instance on the hypervisor. [ 534.185737] env[61972]: DEBUG oslo.service.loopingcall [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.186153] env[61972]: DEBUG nova.compute.manager [-] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 534.186250] env[61972]: DEBUG nova.network.neutron [-] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.209013] env[61972]: DEBUG nova.network.neutron [-] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.238138] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Releasing lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.238575] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 534.238717] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 534.239281] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eac7bca3-51ce-45b7-b00c-b84f42df795c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.247890] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6f23f3-0999-4e44-b07a-a8f63a9de880 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.274045] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 75e101c8-0ea7-40d1-a0ce-9a866b252772 could not be found. [ 534.274336] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 534.274525] env[61972]: INFO nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Took 0.04 seconds to destroy the instance on the hypervisor. [ 534.274777] env[61972]: DEBUG oslo.service.loopingcall [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.275017] env[61972]: DEBUG nova.compute.manager [-] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 534.275140] env[61972]: DEBUG nova.network.neutron [-] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.312044] env[61972]: DEBUG nova.network.neutron [-] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.339544] env[61972]: DEBUG nova.network.neutron [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.492890] env[61972]: DEBUG nova.network.neutron [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.545483] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Releasing lock "refresh_cache-4f70d05b-de38-41d5-b2ff-4856abd85ee4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.545483] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 534.545483] env[61972]: DEBUG nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 534.545483] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.582334] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.669019] env[61972]: DEBUG nova.network.neutron [req-02cd1ec6-59c2-4e46-8a28-574fe3f95ed2 req-37d776e0-09f8-460d-926f-6a1c09097544 service nova] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.688024] env[61972]: DEBUG nova.scheduler.client.report [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 534.710898] env[61972]: DEBUG nova.network.neutron [-] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.760753] env[61972]: INFO nova.scheduler.client.report [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Deleted allocations for instance e3a373f2-640f-479d-98f6-963a5fbc38ac [ 534.815744] env[61972]: DEBUG nova.network.neutron [-] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.820486] env[61972]: DEBUG nova.network.neutron [req-02cd1ec6-59c2-4e46-8a28-574fe3f95ed2 req-37d776e0-09f8-460d-926f-6a1c09097544 service nova] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.996267] env[61972]: DEBUG oslo_concurrency.lockutils [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] Releasing lock "refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.996446] env[61972]: DEBUG nova.compute.manager [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Received event network-vif-deleted-447338cd-16a6-49c9-8cde-4aaf069132ff {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 534.996730] env[61972]: DEBUG nova.compute.manager [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Received event network-changed-7a5e8383-5128-474c-b293-634b1ebdb94f {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 534.996915] env[61972]: DEBUG nova.compute.manager [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Refreshing instance network info cache due to event network-changed-7a5e8383-5128-474c-b293-634b1ebdb94f. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 534.997157] env[61972]: DEBUG oslo_concurrency.lockutils [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] Acquiring lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.997294] env[61972]: DEBUG oslo_concurrency.lockutils [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] Acquired lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.997443] env[61972]: DEBUG nova.network.neutron [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Refreshing network info cache for port 7a5e8383-5128-474c-b293-634b1ebdb94f {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 535.087856] env[61972]: DEBUG nova.network.neutron [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.192880] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.843s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.193529] env[61972]: ERROR nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. 
[ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] Traceback (most recent call last): [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self.driver.spawn(context, instance, image_meta, [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] vm_ref = self.build_virtual_machine(instance, [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] vif_infos = vmwarevif.get_vif_info(self._session, [ 535.193529] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] for vif in network_info: [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] return self._sync_wrapper(fn, *args, **kwargs) [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self.wait() [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self[:] = self._gt.wait() [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] return self._exit_event.wait() [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] result = hub.switch() [ 535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
535.193878] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] return self.greenlet.switch() [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] result = function(*args, **kwargs) [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] return func(*args, **kwargs) [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] raise e [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] nwinfo = self.network_api.allocate_for_instance( [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] created_port_ids = self._update_ports_for_instance( [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] with excutils.save_and_reraise_exception(): [ 535.194714] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] self.force_reraise() [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] raise self.value [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] updated_port = self._update_port( [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] _ensure_no_port_binding_failure(port) [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] raise exception.PortBindingFailed(port_id=port['id']) [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] nova.exception.PortBindingFailed: Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. [ 535.195091] env[61972]: ERROR nova.compute.manager [instance: 433465bd-370b-4af0-a491-e4321124deca] [ 535.195523] env[61972]: DEBUG nova.compute.utils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 535.195599] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.546s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.196977] env[61972]: INFO nova.compute.claims [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.200249] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Build of instance 433465bd-370b-4af0-a491-e4321124deca was re-scheduled: Binding failed for port b5ec1549-d0a9-4ea9-9ab9-80553e7b2aa4, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 535.200714] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 535.200957] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Acquiring lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.201220] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Acquired lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.201287] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 535.213284] env[61972]: INFO nova.compute.manager [-] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Took 1.03 seconds to deallocate network for instance. 
[ 535.215574] env[61972]: DEBUG nova.compute.claims [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 535.215751] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.270212] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be64f0dc-e9b3-4074-bfe6-51190cc32985 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "e3a373f2-640f-479d-98f6-963a5fbc38ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.610s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.272776] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "e3a373f2-640f-479d-98f6-963a5fbc38ac" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 20.478s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.272776] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86ff9a4d-216b-4f03-931f-d574461dda84 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.284109] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3890f5-d6c3-406b-9338-783efc11e318 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.318368] env[61972]: INFO nova.compute.manager [-] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Took 1.04 seconds to deallocate network for instance. 
[ 535.324224] env[61972]: DEBUG nova.compute.claims [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 535.324521] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.325402] env[61972]: DEBUG oslo_concurrency.lockutils [req-02cd1ec6-59c2-4e46-8a28-574fe3f95ed2 req-37d776e0-09f8-460d-926f-6a1c09097544 service nova] Releasing lock "refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.542604] env[61972]: DEBUG nova.network.neutron [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.594945] env[61972]: INFO nova.compute.manager [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] [instance: 4f70d05b-de38-41d5-b2ff-4856abd85ee4] Took 1.05 seconds to deallocate network for instance. [ 535.712273] env[61972]: DEBUG nova.network.neutron [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.747519] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.772773] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 535.815206] env[61972]: INFO nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e3a373f2-640f-479d-98f6-963a5fbc38ac] During the sync_power process the instance has moved from host None to host cpu-1 [ 535.815468] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "e3a373f2-640f-479d-98f6-963a5fbc38ac" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.877888] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.219307] env[61972]: DEBUG oslo_concurrency.lockutils [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] Releasing lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.222135] env[61972]: DEBUG nova.compute.manager [req-07c12a7b-cdfc-4723-84a2-50eddba45965 req-816c2e17-1193-4da0-9329-2eced482f13e service nova] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Received event network-vif-deleted-7a5e8383-5128-474c-b293-634b1ebdb94f {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 536.304056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.385466] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Releasing lock "refresh_cache-433465bd-370b-4af0-a491-e4321124deca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.386437] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 536.386437] env[61972]: DEBUG nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 536.386437] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 536.415161] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ab9d81-9284-4243-b33f-8477335767a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.424472] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3a3ad8-0246-4457-8045-a3cb506bcf76 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.457418] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.460542] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0f26de-8013-4704-a553-41e10dfa4323 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.469606] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e20697-135e-42a6-a909-e76254387f86 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.484340] env[61972]: DEBUG nova.compute.provider_tree [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.643189] env[61972]: INFO nova.scheduler.client.report [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Deleted allocations for instance 4f70d05b-de38-41d5-b2ff-4856abd85ee4 [ 536.717121] env[61972]: DEBUG nova.compute.manager [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Received event network-changed-1b583a76-0230-4efa-b353-d1b1706fec01 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 536.717121] env[61972]: DEBUG nova.compute.manager [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] [instance: 
75e101c8-0ea7-40d1-a0ce-9a866b252772] Refreshing instance network info cache due to event network-changed-1b583a76-0230-4efa-b353-d1b1706fec01. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 536.717121] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] Acquiring lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.717121] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] Acquired lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.717121] env[61972]: DEBUG nova.network.neutron [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Refreshing network info cache for port 1b583a76-0230-4efa-b353-d1b1706fec01 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 536.964428] env[61972]: DEBUG nova.network.neutron [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.989470] env[61972]: DEBUG nova.scheduler.client.report [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 537.044042] env[61972]: DEBUG nova.compute.manager [req-8ebc305e-95f8-4cb9-a7d8-2a630652c29e req-83ead49d-2500-4ccc-9f8b-e22f4f53f7e1 service nova] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Received event network-vif-deleted-14a2dc7b-cf4e-48dc-85af-bd4f0a160952 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 537.159680] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0a8f4b2f-e298-49b6-bb07-d21d1e2ea98c tempest-ImagesOneServerTestJSON-535526512 tempest-ImagesOneServerTestJSON-535526512-project-member] Lock "4f70d05b-de38-41d5-b2ff-4856abd85ee4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.045s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.247049] env[61972]: DEBUG nova.network.neutron [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.332602] env[61972]: DEBUG nova.network.neutron [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.470710] env[61972]: INFO nova.compute.manager [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] [instance: 433465bd-370b-4af0-a491-e4321124deca] Took 1.08 seconds to deallocate network for instance. [ 537.497044] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.497760] env[61972]: DEBUG nova.compute.manager [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 537.502112] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.845s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.502112] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.502112] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 537.502112] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.382s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.508466] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d128ac48-8309-4524-91fd-30fc2afad845 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.519388] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce5f9c5-257c-4c57-975b-020cd47a58b5 {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.537158] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533dad33-8374-4488-ad14-7f525116c1ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.548870] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df227a20-f55d-4769-8589-e0822950ce88 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.583131] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 537.583457] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.664420] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 537.835931] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] Releasing lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.837037] env[61972]: DEBUG nova.compute.manager [req-d8a40cd8-9764-4ff2-b98d-74a1c7c64d7b req-a4bad002-ff0b-4d6d-9542-17af6d0e4893 service nova] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Received event network-vif-deleted-1b583a76-0230-4efa-b353-d1b1706fec01 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 537.959327] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Acquiring lock "24073cc5-cccd-4a1b-87d6-a8a6458251f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.960345] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Lock "24073cc5-cccd-4a1b-87d6-a8a6458251f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.011981] env[61972]: DEBUG nova.compute.utils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 
tempest-ServerDiagnosticsV248Test-220293105-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.017458] env[61972]: DEBUG nova.compute.manager [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Not allocating networking since 'none' was specified. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 538.192255] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.203424] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ca4b67-9645-4d64-8598-e7b0db6fcd48 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.210823] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a8fb7e-75f2-4442-981e-a29d7d98fc30 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.240693] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4818a16-0323-4fd8-8479-7ab3c18076df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.248100] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea369457-9953-40b7-9e62-fbccbba8173a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.263017] env[61972]: DEBUG nova.compute.provider_tree [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.513624] env[61972]: INFO nova.scheduler.client.report [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Deleted allocations for instance 433465bd-370b-4af0-a491-e4321124deca [ 538.524812] env[61972]: DEBUG nova.compute.manager [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 538.769084] env[61972]: DEBUG nova.scheduler.client.report [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 539.038710] env[61972]: DEBUG oslo_concurrency.lockutils [None req-df39ce22-a481-496e-8ca0-6726cfbbb631 tempest-FloatingIPsAssociationNegativeTestJSON-1483450 tempest-FloatingIPsAssociationNegativeTestJSON-1483450-project-member] Lock "433465bd-370b-4af0-a491-e4321124deca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.846s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.274020] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.772s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.274665] env[61972]: ERROR nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. 
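The ERROR record above, and the traceback that follows it, show Nova's standard reaction to a failed Neutron port binding: after creating or updating the port, Nova inspects the binding Neutron returned and raises PortBindingFailed when the backend could not bind it; the claim for this instance has just been aborted (the abort_instance_claim lock records immediately above) and the build is re-scheduled, as the "was re-scheduled" record further down shows. A simplified, self-contained sketch of that check, not the exact Nova source:

# When Neutron reports the binding as failed, the port's binding:vif_type comes
# back as "binding_failed"; Nova turns that into PortBindingFailed, which is
# the exception that unwinds the spawn in the traceback below.
VIF_TYPE_BINDING_FAILED = "binding_failed"


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    """Raise if the Neutron port dict shows a failed binding."""
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port["id"])


# Port dict shaped like a Neutron response; the id is taken from the record
# above, the vif_type value illustrates the failure case.
port = {"id": "447338cd-16a6-49c9-8cde-4aaf069132ff",
        "binding:vif_type": VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same message as the ERROR record above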
[ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Traceback (most recent call last): [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self.driver.spawn(context, instance, image_meta, [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self._vmops.spawn(context, instance, image_meta, injected_files, [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] vm_ref = self.build_virtual_machine(instance, [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] vif_infos = vmwarevif.get_vif_info(self._session, [ 539.274665] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] for vif in network_info: [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] return self._sync_wrapper(fn, *args, **kwargs) [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self.wait() [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self[:] = self._gt.wait() [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] return self._exit_event.wait() [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] result = hub.switch() [ 539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
539.275046] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] return self.greenlet.switch() [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] result = function(*args, **kwargs) [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] return func(*args, **kwargs) [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] raise e [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] nwinfo = self.network_api.allocate_for_instance( [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] created_port_ids = self._update_ports_for_instance( [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] with excutils.save_and_reraise_exception(): [ 539.275414] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] self.force_reraise() [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] raise self.value [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] updated_port = self._update_port( [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] _ensure_no_port_binding_failure(port) [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] raise exception.PortBindingFailed(port_id=port['id']) [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] nova.exception.PortBindingFailed: Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. [ 539.275799] env[61972]: ERROR nova.compute.manager [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] [ 539.276121] env[61972]: DEBUG nova.compute.utils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 539.278740] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.850s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.285123] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Build of instance 96d406fc-7802-4d05-a9e7-f0fe5576aa74 was re-scheduled: Binding failed for port 447338cd-16a6-49c9-8cde-4aaf069132ff, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 539.285123] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 539.285123] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Acquiring lock "refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.285123] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Acquired lock "refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.285415] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 539.541221] env[61972]: DEBUG nova.compute.manager [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 539.552405] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 539.599967] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 539.600538] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 539.601535] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.601768] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 539.601921] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.602083] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 539.602298] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 539.602459] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 539.604597] 
env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 539.604597] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 539.604597] env[61972]: DEBUG nova.virt.hardware [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 539.604597] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c48019-c2bc-40a8-90cc-4724f4d53104 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.613789] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0569ead4-fee1-4460-968f-6cc5c3c2f97b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.629677] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 539.639185] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 539.640798] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-793cd466-f0b8-4d99-8077-5328e31194f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.653085] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Created folder: OpenStack in parent group-v4. [ 539.653289] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Creating folder: Project (fd139ef77c1649aca614eac7bddfd2c8). Parent ref: group-v294799. 
{{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 539.653555] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-254f4d85-d464-49c7-95f7-2707687dda2f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.664125] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Created folder: Project (fd139ef77c1649aca614eac7bddfd2c8) in parent group-v294799. [ 539.664125] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Creating folder: Instances. Parent ref: group-v294800. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 539.664313] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7827b69f-66b5-4f4d-8a74-74af3397280b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.672719] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Created folder: Instances in parent group-v294800. [ 539.672719] env[61972]: DEBUG oslo.service.loopingcall [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.672719] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 539.672885] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edff31ca-5262-4d09-91ab-8bf0edf3d15a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.690903] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 539.690903] env[61972]: value = "task-1389056" [ 539.690903] env[61972]: _type = "Task" [ 539.690903] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.699683] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389056, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.833820] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 540.029861] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfc3fb3-8b8e-43c8-ada4-b236a2e38f9c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.041394] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be7a345-8d91-4aca-a3fa-9943b7d3f353 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.079929] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ef272f-9f2c-4eb6-8392-31bab1b816eb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.092749] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e3216b-06e7-4dbd-96ee-011bfd666147 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.108683] env[61972]: DEBUG nova.compute.provider_tree [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.110902] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.198726] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Acquiring lock "9e258f66-df7b-4acf-a066-ba66958a7861" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.198726] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Lock "9e258f66-df7b-4acf-a066-ba66958a7861" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.219091] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389056, 'name': CreateVM_Task, 'duration_secs': 0.347493} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.219255] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 540.221027] env[61972]: DEBUG oslo_vmware.service [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1527cd38-431c-4fa0-92f3-4c70e233e3f3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.230354] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.230576] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.231265] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 540.231540] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b87172c-d5da-4089-99dc-0228fc0da270 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.237686] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 540.237686] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]525a7d58-6702-b068-f306-9ee882cb5de4" [ 540.237686] env[61972]: _type = "Task" [ 540.237686] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.255709] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525a7d58-6702-b068-f306-9ee882cb5de4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.317082] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.615016] env[61972]: DEBUG nova.scheduler.client.report [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 540.755242] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.755242] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 540.755458] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.755585] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.755985] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 540.756326] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b22118d3-69f3-448d-b6ff-e880ec49c2b2 {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.776160] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 540.776355] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 540.777356] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a67006-d978-49f6-9aec-34dffa78ee64 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.785321] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a180ddcd-3b0f-4917-8a72-f093152a3672 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.790326] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 540.790326] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52dff174-b6df-54fe-013a-58ef7a531297" [ 540.790326] env[61972]: _type = "Task" [ 540.790326] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.800194] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52dff174-b6df-54fe-013a-58ef7a531297, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.822971] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Releasing lock "refresh_cache-96d406fc-7802-4d05-a9e7-f0fe5576aa74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.822971] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 540.822971] env[61972]: DEBUG nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 540.822971] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 540.862476] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.124362] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.846s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.125256] env[61972]: ERROR nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please check neutron logs for more information. 
[ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Traceback (most recent call last): [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self.driver.spawn(context, instance, image_meta, [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] vm_ref = self.build_virtual_machine(instance, [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.125256] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] for vif in network_info: [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] return self._sync_wrapper(fn, *args, **kwargs) [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self.wait() [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self[:] = self._gt.wait() [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] return self._exit_event.wait() [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] result = hub.switch() [ 541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
541.125641] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] return self.greenlet.switch() [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] result = function(*args, **kwargs) [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] return func(*args, **kwargs) [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] raise e [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] nwinfo = self.network_api.allocate_for_instance( [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] created_port_ids = self._update_ports_for_instance( [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] with excutils.save_and_reraise_exception(): [ 541.125982] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] self.force_reraise() [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] raise self.value [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] updated_port = self._update_port( [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] _ensure_no_port_binding_failure(port) [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] raise exception.PortBindingFailed(port_id=port['id']) [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] nova.exception.PortBindingFailed: Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please check neutron logs for more information. [ 541.126333] env[61972]: ERROR nova.compute.manager [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] [ 541.126661] env[61972]: DEBUG nova.compute.utils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 541.129543] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.456s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.131353] env[61972]: INFO nova.compute.claims [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 541.134512] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Build of instance 0fae076c-ced2-4456-8223-2d71e78fabb4 was re-scheduled: Binding failed for port 7a5e8383-5128-474c-b293-634b1ebdb94f, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 541.135024] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 541.135317] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Acquiring lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.135430] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Acquired lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.135589] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 541.258500] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "107b6153-65ad-48e4-9810-113bfacdd3d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.258861] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "107b6153-65ad-48e4-9810-113bfacdd3d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.304199] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Preparing fetch location {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 541.304626] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Creating directory with path [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 541.305574] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d8ef47c-99d8-479b-bccc-9f8a8ce4cbbd {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.327575] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Created directory with path [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 541.327575] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Fetch image to [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 541.327575] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Downloading image file data 79227ea9-188c-426d-a7d8-cb14b658f493 to [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk on the data store datastore2 {{(pid=61972) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 541.327781] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d4e2f2-4fb0-49bc-a29f-8fdffd363cf6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.336031] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71d7d65-7628-4e7b-aa2d-4b7fd9bcf910 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.350085] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bfb621-afb4-4d48-a555-113c4d8773d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.386793] env[61972]: DEBUG nova.network.neutron [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.389317] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd02c5b-842a-40b6-a170-6e31215424ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.396271] env[61972]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-511aa962-940f-4576-8dd8-7937baf2f3aa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.485715] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 
72d434a7-ea70-4594-971f-7eec8ebea153] Downloading image file data 79227ea9-188c-426d-a7d8-cb14b658f493 to the data store datastore2 {{(pid=61972) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 541.572045] env[61972]: DEBUG oslo_vmware.rw_handles [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61972) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 541.706313] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.893311] env[61972]: INFO nova.compute.manager [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] [instance: 96d406fc-7802-4d05-a9e7-f0fe5576aa74] Took 1.07 seconds to deallocate network for instance. [ 542.072425] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.298138] env[61972]: DEBUG oslo_vmware.rw_handles [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Completed reading data from the image iterator. {{(pid=61972) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 542.298138] env[61972]: DEBUG oslo_vmware.rw_handles [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 542.437385] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Downloaded image file data 79227ea9-188c-426d-a7d8-cb14b658f493 to vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk on the data store datastore2 {{(pid=61972) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 542.439256] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Caching image {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 542.439698] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Copying Virtual Disk [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk to [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 542.441031] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63c4469b-58bf-4ecd-915c-e48d6918fec5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.453325] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 542.453325] env[61972]: value = "task-1389057" [ 542.453325] env[61972]: _type = "Task" [ 542.453325] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.460346] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fd0608-cd79-4c31-a483-a3d1ca3e06df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.466128] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389057, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.467770] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfb2234-d02d-4c62-8547-dc65152a4805 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.502852] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0fa431-ef31-4702-9363-7f5c6fdbaa61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.511813] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30ccf0c-45ea-4d6d-9c45-1e5beeb8c175 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.527379] env[61972]: DEBUG nova.compute.provider_tree [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 542.575025] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Releasing lock "refresh_cache-0fae076c-ced2-4456-8223-2d71e78fabb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.575336] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 542.575474] env[61972]: DEBUG nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 542.575661] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 542.604674] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Acquiring lock "2795b001-aaf2-4886-bba7-bd764c29638c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.604944] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Lock "2795b001-aaf2-4886-bba7-bd764c29638c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.632244] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.949862] env[61972]: INFO nova.scheduler.client.report [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Deleted allocations for instance 96d406fc-7802-4d05-a9e7-f0fe5576aa74 [ 542.970099] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389057, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.031036] env[61972]: DEBUG nova.scheduler.client.report [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 543.135121] env[61972]: DEBUG nova.network.neutron [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.466502] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389057, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.693012} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.466777] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Copied Virtual Disk [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk to [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 543.466948] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Deleting the datastore file [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 543.467211] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-445b0084-78f6-48a3-8ca6-8f094ee707af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.472180] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a64c1598-7708-4040-8c16-980f76c9212a tempest-InstanceActionsNegativeTestJSON-1892608288 tempest-InstanceActionsNegativeTestJSON-1892608288-project-member] Lock "96d406fc-7802-4d05-a9e7-f0fe5576aa74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.183s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.475931] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 
tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 543.475931] env[61972]: value = "task-1389058" [ 543.475931] env[61972]: _type = "Task" [ 543.475931] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.488474] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.540255] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.541229] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 543.546636] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.711s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.548406] env[61972]: INFO nova.compute.claims [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.640433] env[61972]: INFO nova.compute.manager [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] [instance: 0fae076c-ced2-4456-8223-2d71e78fabb4] Took 1.06 seconds to deallocate network for instance. [ 543.975066] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 543.990704] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167329} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.991277] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 543.991570] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Moving file from [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12/79227ea9-188c-426d-a7d8-cb14b658f493 to [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493. {{(pid=61972) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 543.991903] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-87c583f5-9f1e-4662-adaa-b54fd1463fe1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.999568] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 543.999568] env[61972]: value = "task-1389059" [ 543.999568] env[61972]: _type = "Task" [ 543.999568] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.011757] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389059, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.056588] env[61972]: DEBUG nova.compute.utils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.062966] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 544.062966] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 544.188855] env[61972]: DEBUG nova.policy [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e329450badd345fda2775022a82e90f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09d85e0631a346c18c4f0f6cbc3b0a14', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 544.514262] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389059, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.026607} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 544.515792] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] File moved {{(pid=61972) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 544.515792] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Cleaning up location [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 544.515792] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Deleting the datastore file [datastore2] vmware_temp/1d5ba080-3369-4686-97e9-9a3298ac1f12 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 544.516119] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c944d3f6-9e23-414f-afe8-8bcd134244cc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.518914] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.525459] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 
tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 544.525459] env[61972]: value = "task-1389060" [ 544.525459] env[61972]: _type = "Task" [ 544.525459] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.545141] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.561981] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 544.708534] env[61972]: INFO nova.scheduler.client.report [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Deleted allocations for instance 0fae076c-ced2-4456-8223-2d71e78fabb4 [ 544.934778] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddbdf8d-ccf2-44f4-a23c-86a08be159f3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.950261] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6272386b-fbc5-432c-a507-c11a8b58089a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.987486] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43ab76e-6f3c-43bd-8c04-06f630d573ff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.996987] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397894c9-8689-498e-b05d-aa9632a1e152 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.010598] env[61972]: DEBUG nova.compute.provider_tree [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.040998] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025613} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.040998] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 545.040998] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3d9cd0f-3771-463f-ba0b-53b6d14e3f29 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.052393] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 545.052393] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]529f60d0-dd31-36d5-2ae6-4aadda2bf78f" [ 545.052393] env[61972]: _type = "Task" [ 545.052393] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.060800] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529f60d0-dd31-36d5-2ae6-4aadda2bf78f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.226840] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d295dcd4-3d03-453a-afa8-c614344ca111 tempest-ServerExternalEventsTest-1317683250 tempest-ServerExternalEventsTest-1317683250-project-member] Lock "0fae076c-ced2-4456-8223-2d71e78fabb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.457s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.380616] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Successfully created port: 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.516074] env[61972]: DEBUG nova.scheduler.client.report [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 545.569265] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': 
session[52a9d73d-5959-3000-f45d-05308a20e7d5]529f60d0-dd31-36d5-2ae6-4aadda2bf78f, 'name': SearchDatastore_Task, 'duration_secs': 0.00889} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.569265] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.569265] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 72d434a7-ea70-4594-971f-7eec8ebea153/72d434a7-ea70-4594-971f-7eec8ebea153.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 545.569265] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb45f4d1-56c7-42fc-a1b2-a63e62ba81c6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.574590] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 545.581728] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 545.581728] env[61972]: value = "task-1389061" [ 545.581728] env[61972]: _type = "Task" [ 545.581728] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.595186] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389061, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.645493] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 545.645493] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 545.645493] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 545.645689] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 545.645689] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 545.645689] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 545.645689] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 545.645689] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 545.645838] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 545.645838] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 545.645838] env[61972]: DEBUG nova.virt.hardware [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 545.646587] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3604dd99-d555-4236-b13f-f8bb8f9ac0ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.656818] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58f0e96-dd5b-4a4f-bdeb-c69729fadec5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.731762] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 545.865391] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Acquiring lock "6858305a-6ab4-401d-ad1f-e6d21117d9e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.865391] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Lock "6858305a-6ab4-401d-ad1f-e6d21117d9e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.023438] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.023932] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 546.028674] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.813s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.096457] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389061, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502016} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.096457] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 72d434a7-ea70-4594-971f-7eec8ebea153/72d434a7-ea70-4594-971f-7eec8ebea153.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 546.096457] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 546.096457] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2aa9f56-c638-491b-992c-c01385363d19 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.103578] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 546.103578] env[61972]: value = "task-1389062" [ 546.103578] env[61972]: _type = "Task" [ 546.103578] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.110208] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389062, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.294848] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.537532] env[61972]: DEBUG nova.compute.utils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 546.549693] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 546.550810] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 546.614770] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063524} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.615522] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 546.616570] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9f21d5-72b7-481b-9636-10ab5f8def54 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.648628] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 72d434a7-ea70-4594-971f-7eec8ebea153/72d434a7-ea70-4594-971f-7eec8ebea153.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 546.649607] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2344d4c2-0118-4f28-8b09-72592167c3ac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.682243] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 546.682243] env[61972]: value = "task-1389063" [ 546.682243] env[61972]: _type = "Task" [ 546.682243] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.696907] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389063, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.943288] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e082c5ad-bd8b-488d-bbcd-eb41de4b6135 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.954924] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f266a761-3240-4c8c-b002-1f9f27da7ad6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.003333] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441e536e-8f0d-4683-a56b-5851c5796338 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.022734] env[61972]: DEBUG nova.policy [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6b7c5b037a54c8cbd151ad0f1875f37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbbaa322b60942819cfb147b5201daf4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 547.029797] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529c2063-cb19-4dde-92a2-c188553c782f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.048945] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 547.051941] env[61972]: DEBUG nova.compute.provider_tree [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.196813] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389063, 'name': ReconfigVM_Task, 'duration_secs': 0.281186} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.196813] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 72d434a7-ea70-4594-971f-7eec8ebea153/72d434a7-ea70-4594-971f-7eec8ebea153.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 547.198351] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd1242cf-386c-48bf-8dbc-036df5143e4d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.205990] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 547.205990] env[61972]: value = "task-1389064" [ 547.205990] env[61972]: _type = "Task" [ 547.205990] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.220342] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389064, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.560572] env[61972]: DEBUG nova.scheduler.client.report [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 547.736119] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389064, 'name': Rename_Task, 'duration_secs': 0.199937} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.736119] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 547.736119] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2612724f-96c6-449c-8a9f-492acf3411e3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.741702] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 547.741702] env[61972]: value = "task-1389065" [ 547.741702] env[61972]: _type = "Task" [ 547.741702] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.753243] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.078915] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.046s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.078915] env[61972]: ERROR nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. 
[ 548.078915] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Traceback (most recent call last): [ 548.078915] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 548.078915] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self.driver.spawn(context, instance, image_meta, [ 548.078915] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 548.078915] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.078915] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.078915] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] vm_ref = self.build_virtual_machine(instance, [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] for vif in network_info: [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] return self._sync_wrapper(fn, *args, **kwargs) [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self.wait() [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self[:] = self._gt.wait() [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] return self._exit_event.wait() [ 548.079521] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] result = hub.switch() [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] return self.greenlet.switch() [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] result = function(*args, **kwargs) [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] return func(*args, **kwargs) [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] raise e [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] nwinfo = self.network_api.allocate_for_instance( [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 548.079835] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] created_port_ids = self._update_ports_for_instance( [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] with excutils.save_and_reraise_exception(): [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] self.force_reraise() [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] raise self.value [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] updated_port = self._update_port( [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] _ensure_no_port_binding_failure(port) [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 548.080160] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] raise exception.PortBindingFailed(port_id=port['id']) [ 548.080444] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] nova.exception.PortBindingFailed: Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. [ 548.080444] env[61972]: ERROR nova.compute.manager [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] [ 548.091050] env[61972]: DEBUG nova.compute.utils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 548.091050] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 548.091050] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.762s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.100577] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Build of instance 2d43aa8e-ea11-4209-b166-b87159a37e72 was re-scheduled: Binding failed for port 14a2dc7b-cf4e-48dc-85af-bd4f0a160952, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 548.101170] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 548.101422] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Acquiring lock "refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.101607] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Acquired lock "refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.102302] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 548.194629] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 548.194958] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 548.195044] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.195539] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 548.195539] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.195539] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 548.195692] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 548.195886] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 548.197295] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 548.197472] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 548.197672] env[61972]: DEBUG nova.virt.hardware [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 548.198975] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dc5019-48f4-4c3b-a836-4e21a9dd7eb9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.213747] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55800842-f8dc-488d-a047-31e53f2bd6d3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.271989] env[61972]: DEBUG oslo_vmware.api [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389065, 'name': PowerOnVM_Task, 'duration_secs': 0.444952} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.273316] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 548.273423] env[61972]: INFO nova.compute.manager [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Took 8.73 seconds to spawn the instance on the hypervisor. [ 548.273923] env[61972]: DEBUG nova.compute.manager [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 548.274910] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310a5eaf-be0c-4822-bc01-5977b95541a2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.631136] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.702636] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.813378] env[61972]: INFO nova.compute.manager [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Took 21.19 seconds to build instance. 
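[editor's note] The spawn sequence recorded above for instance 72d434a7-ea70-4594-971f-7eec8ebea153 (copy the cached image disk, extend the root disk, reconfigure, rename, power on) is a chain of vSphere tasks submitted through the oslo.vmware session and polled until completion; the repeated "Task: {'id': task-..., ...} progress is N%" lines come from that polling. A minimal sketch of the submit-and-wait pattern, with the vCenter endpoint, credentials and managed-object ID as placeholders rather than values taken from this log:

```python
from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials; in Nova these come from the
# [vmware] section of nova.conf, not from this log.
session = api.VMwareAPISession(
    host='vc.example.test',
    server_username='administrator@vsphere.local',
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Hypothetical managed-object ID for the VM being built.
vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

# Submit a vSphere task (here: power the VM on) and block until it finishes.
# wait_for_task() polls the task and logs its progress, which is what produces
# the "progress is N%" / "completed successfully" lines above.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```

Each of the CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task calls in the log follows this same shape, only with a different SOAP method and arguments.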
[ 548.976806] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92294e53-507c-4af3-bf16-7ec01b261760 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.987312] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1774673-49fc-4f73-8920-5cace566570a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.022919] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a551b59-f153-4f1d-8ac2-619d7c900a7a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.031214] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ca0773-7cdc-4f51-9a6f-37a66d918141 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.048029] env[61972]: DEBUG nova.compute.provider_tree [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.181864] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Successfully created port: a020e457-148f-436e-8d92-e4822ddc9e60 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.208652] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Releasing lock "refresh_cache-2d43aa8e-ea11-4209-b166-b87159a37e72" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.210830] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 549.210830] env[61972]: DEBUG nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 549.210830] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 549.238584] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.315531] env[61972]: DEBUG oslo_concurrency.lockutils [None req-05c0a1fb-cafd-4a80-9e3e-0bfd42e6a3d7 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "72d434a7-ea70-4594-971f-7eec8ebea153" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.709s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.551432] env[61972]: DEBUG nova.scheduler.client.report [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 549.749204] env[61972]: DEBUG nova.network.neutron [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.819892] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 549.871077] env[61972]: ERROR nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. [ 549.871077] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 549.871077] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 549.871077] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 549.871077] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.871077] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 549.871077] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.871077] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 549.871077] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.871077] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 549.871077] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.871077] env[61972]: ERROR nova.compute.manager raise self.value [ 549.871077] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.871077] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 549.871077] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.871077] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 549.871727] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.871727] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 549.871727] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. 
[ 549.871727] env[61972]: ERROR nova.compute.manager [ 549.871727] env[61972]: Traceback (most recent call last): [ 549.871727] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 549.871727] env[61972]: listener.cb(fileno) [ 549.871727] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.871727] env[61972]: result = function(*args, **kwargs) [ 549.871727] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 549.871727] env[61972]: return func(*args, **kwargs) [ 549.871727] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 549.871727] env[61972]: raise e [ 549.871727] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 549.871727] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 549.871727] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.871727] env[61972]: created_port_ids = self._update_ports_for_instance( [ 549.871727] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.871727] env[61972]: with excutils.save_and_reraise_exception(): [ 549.871727] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.871727] env[61972]: self.force_reraise() [ 549.871727] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.871727] env[61972]: raise self.value [ 549.871727] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.871727] env[61972]: updated_port = self._update_port( [ 549.871727] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.871727] env[61972]: _ensure_no_port_binding_failure(port) [ 549.871727] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.871727] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 549.872540] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. [ 549.872540] env[61972]: Removing descriptor: 17 [ 549.872540] env[61972]: ERROR nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. 
[ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Traceback (most recent call last): [ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] yield resources [ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self.driver.spawn(context, instance, image_meta, [ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.872540] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] vm_ref = self.build_virtual_machine(instance, [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] for vif in network_info: [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] return self._sync_wrapper(fn, *args, **kwargs) [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self.wait() [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self[:] = self._gt.wait() [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] return self._exit_event.wait() [ 549.872926] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.873308] env[61972]: ERROR 
nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] result = hub.switch() [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] return self.greenlet.switch() [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] result = function(*args, **kwargs) [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] return func(*args, **kwargs) [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] raise e [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] nwinfo = self.network_api.allocate_for_instance( [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.873308] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] created_port_ids = self._update_ports_for_instance( [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] with excutils.save_and_reraise_exception(): [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self.force_reraise() [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] raise self.value [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] updated_port = self._update_port( [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.873687] 
env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] _ensure_no_port_binding_failure(port) [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.873687] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] raise exception.PortBindingFailed(port_id=port['id']) [ 549.874030] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] nova.exception.PortBindingFailed: Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. [ 549.874030] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] [ 549.874030] env[61972]: INFO nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Terminating instance [ 550.057974] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.971s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.057974] env[61972]: ERROR nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. 
[ 550.057974] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Traceback (most recent call last): [ 550.057974] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 550.057974] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self.driver.spawn(context, instance, image_meta, [ 550.057974] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 550.057974] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.057974] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.057974] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] vm_ref = self.build_virtual_machine(instance, [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] for vif in network_info: [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] return self._sync_wrapper(fn, *args, **kwargs) [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self.wait() [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self[:] = self._gt.wait() [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] return self._exit_event.wait() [ 550.058688] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] result = hub.switch() [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] return self.greenlet.switch() [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] result = function(*args, **kwargs) [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] return func(*args, **kwargs) [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] raise e [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] nwinfo = self.network_api.allocate_for_instance( [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.059084] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] created_port_ids = self._update_ports_for_instance( [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] with excutils.save_and_reraise_exception(): [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] self.force_reraise() [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] raise self.value [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] updated_port = self._update_port( [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] _ensure_no_port_binding_failure(port) [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 550.059535] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] raise exception.PortBindingFailed(port_id=port['id']) [ 550.059871] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] nova.exception.PortBindingFailed: Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. [ 550.059871] env[61972]: ERROR nova.compute.manager [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] [ 550.059871] env[61972]: DEBUG nova.compute.utils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 550.062235] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Build of instance 75e101c8-0ea7-40d1-a0ce-9a866b252772 was re-scheduled: Binding failed for port 1b583a76-0230-4efa-b353-d1b1706fec01, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 550.062721] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 550.062961] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.063120] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquired lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.063279] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 550.070396] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.763s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.070396] env[61972]: INFO nova.compute.claims [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d 
tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.254198] env[61972]: INFO nova.compute.manager [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] [instance: 2d43aa8e-ea11-4209-b166-b87159a37e72] Took 1.04 seconds to deallocate network for instance. [ 550.370739] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.380315] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Acquiring lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.380545] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Acquired lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.381153] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 550.629982] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.961058] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.966186] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.035812] env[61972]: DEBUG nova.compute.manager [req-45e18de4-17ff-417d-8282-ac38c45f8b57 req-b5dd9fa7-9b0a-44e9-9337-5c296c281ae2 service nova] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Received event network-changed-00686c4d-cd65-4d4c-b7ea-3bfa3387ed64 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 551.035996] env[61972]: DEBUG nova.compute.manager [req-45e18de4-17ff-417d-8282-ac38c45f8b57 req-b5dd9fa7-9b0a-44e9-9337-5c296c281ae2 service nova] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Refreshing instance network info cache due to event network-changed-00686c4d-cd65-4d4c-b7ea-3bfa3387ed64. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 551.038490] env[61972]: DEBUG oslo_concurrency.lockutils [req-45e18de4-17ff-417d-8282-ac38c45f8b57 req-b5dd9fa7-9b0a-44e9-9337-5c296c281ae2 service nova] Acquiring lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.344843] env[61972]: INFO nova.scheduler.client.report [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Deleted allocations for instance 2d43aa8e-ea11-4209-b166-b87159a37e72 [ 551.426689] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acf09a2-33da-49ac-9025-4483941e51af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.433389] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.437653] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be726f3f-d3ab-4d74-b75e-07647033d7b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.479022] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Releasing lock "refresh_cache-75e101c8-0ea7-40d1-a0ce-9a866b252772" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.479022] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 551.479022] env[61972]: DEBUG nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 551.479022] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 551.481202] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36887487-4fd0-4e8a-8415-4773cfda6463 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.493085] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9239de9b-50a6-4d27-9a5c-9572e57292b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.508751] env[61972]: DEBUG nova.compute.provider_tree [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.578150] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.859805] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e11a5989-22a9-43a4-88da-18c9a8129cca tempest-ServersAdminNegativeTestJSON-1397369892 tempest-ServersAdminNegativeTestJSON-1397369892-project-member] Lock "2d43aa8e-ea11-4209-b166-b87159a37e72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.970s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.948514] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Releasing lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.949046] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 551.949784] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 551.949784] env[61972]: DEBUG oslo_concurrency.lockutils [req-45e18de4-17ff-417d-8282-ac38c45f8b57 req-b5dd9fa7-9b0a-44e9-9337-5c296c281ae2 service nova] Acquired lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.950017] env[61972]: DEBUG nova.network.neutron [req-45e18de4-17ff-417d-8282-ac38c45f8b57 req-b5dd9fa7-9b0a-44e9-9337-5c296c281ae2 service nova] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Refreshing network info cache for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 551.955576] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7fcfea1f-73f5-4311-8095-db57c70a3a86 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.967068] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca3a391-74ef-4f79-a723-3c9983ed07f6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.998764] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 96e5b238-aab4-4f75-abe8-f5a14b015099 could not be found. [ 551.998999] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 551.999488] env[61972]: INFO nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Took 0.05 seconds to destroy the instance on the hypervisor. [ 551.999488] env[61972]: DEBUG oslo.service.loopingcall [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 551.999662] env[61972]: DEBUG nova.compute.manager [-] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 551.999790] env[61972]: DEBUG nova.network.neutron [-] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 552.013868] env[61972]: DEBUG nova.scheduler.client.report [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 552.083942] env[61972]: DEBUG nova.network.neutron [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.201097] env[61972]: DEBUG nova.network.neutron [-] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.279963] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "ba985ad1-390d-4a2e-ad96-c273231f8549" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.280231] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "ba985ad1-390d-4a2e-ad96-c273231f8549" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.362858] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.363154] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.366212] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 552.491347] env[61972]: DEBUG nova.network.neutron [req-45e18de4-17ff-417d-8282-ac38c45f8b57 req-b5dd9fa7-9b0a-44e9-9337-5c296c281ae2 service nova] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.510873] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "56b1ea80-3109-4212-959b-0e5fb2fc66d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.511381] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "56b1ea80-3109-4212-959b-0e5fb2fc66d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.526494] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.526494] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 552.532884] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.949s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.585960] env[61972]: INFO nova.compute.manager [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 75e101c8-0ea7-40d1-a0ce-9a866b252772] Took 1.11 seconds to deallocate network for instance. 
[ 552.635853] env[61972]: DEBUG nova.network.neutron [req-45e18de4-17ff-417d-8282-ac38c45f8b57 req-b5dd9fa7-9b0a-44e9-9337-5c296c281ae2 service nova] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.707469] env[61972]: DEBUG nova.network.neutron [-] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.913999] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.040414] env[61972]: DEBUG nova.compute.utils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 553.042448] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 553.042655] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 553.143100] env[61972]: DEBUG oslo_concurrency.lockutils [req-45e18de4-17ff-417d-8282-ac38c45f8b57 req-b5dd9fa7-9b0a-44e9-9337-5c296c281ae2 service nova] Releasing lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.217456] env[61972]: INFO nova.compute.manager [-] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Took 1.22 seconds to deallocate network for instance. 
[ 553.222433] env[61972]: DEBUG nova.compute.claims [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 553.222433] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.258793] env[61972]: DEBUG nova.policy [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b12122c73ec440098daf417330d4443f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d3e3e9945fc4774a35ad817e8e312ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 553.348357] env[61972]: DEBUG nova.compute.manager [None req-4ea0bcc5-a6a8-4cbe-b442-512162d74f45 tempest-ServerDiagnosticsV248Test-870806284 tempest-ServerDiagnosticsV248Test-870806284-project-admin] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 553.349861] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076312bc-0732-4256-99c4-9c1d48ca1e7b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.361245] env[61972]: INFO nova.compute.manager [None req-4ea0bcc5-a6a8-4cbe-b442-512162d74f45 tempest-ServerDiagnosticsV248Test-870806284 tempest-ServerDiagnosticsV248Test-870806284-project-admin] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Retrieving diagnostics [ 553.362095] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be68682-3d3e-47c1-830f-86f7e99f7939 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.552246] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 553.630765] env[61972]: INFO nova.scheduler.client.report [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Deleted allocations for instance 75e101c8-0ea7-40d1-a0ce-9a866b252772 [ 554.090235] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 75e101c8-0ea7-40d1-a0ce-9a866b252772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 554.090470] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 72d434a7-ea70-4594-971f-7eec8ebea153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.090595] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 96e5b238-aab4-4f75-abe8-f5a14b015099 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.090712] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance d32a7937-792a-4959-bded-819463472399 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.141140] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cf49ea92-aa64-4a15-8a87-4b9c9b187e63 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "75e101c8-0ea7-40d1-a0ce-9a866b252772" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.457s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.573623] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 554.595106] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 7a7c98db-6ed4-4908-adc8-53347d693dca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 554.595303] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance fb28710d-cd15-41d4-b7aa-8389093ea9a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.618129] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 554.618374] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 554.618522] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 554.618696] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 554.618833] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 554.618996] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 554.619213] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 554.619368] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 554.619537] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 554.619704] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 554.619866] env[61972]: DEBUG nova.virt.hardware [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 554.620766] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8240b417-1bee-4ed0-adc1-20e4a7ff0a95 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.632836] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661e4266-4f5d-45f5-b6f2-212ac800f62d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.650669] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 554.655054] env[61972]: ERROR nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. 
[ 554.655054] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 554.655054] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 554.655054] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 554.655054] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 554.655054] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 554.655054] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 554.655054] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 554.655054] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.655054] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 554.655054] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.655054] env[61972]: ERROR nova.compute.manager raise self.value [ 554.655054] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 554.655054] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 554.655054] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.655054] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 554.656164] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 554.656164] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 554.656164] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. 
[ 554.656164] env[61972]: ERROR nova.compute.manager [ 554.656164] env[61972]: Traceback (most recent call last): [ 554.656164] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 554.656164] env[61972]: listener.cb(fileno) [ 554.656164] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 554.656164] env[61972]: result = function(*args, **kwargs) [ 554.656164] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 554.656164] env[61972]: return func(*args, **kwargs) [ 554.656164] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 554.656164] env[61972]: raise e [ 554.656164] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 554.656164] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 554.656164] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 554.656164] env[61972]: created_port_ids = self._update_ports_for_instance( [ 554.656164] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 554.656164] env[61972]: with excutils.save_and_reraise_exception(): [ 554.656164] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.656164] env[61972]: self.force_reraise() [ 554.656164] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.656164] env[61972]: raise self.value [ 554.656164] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 554.656164] env[61972]: updated_port = self._update_port( [ 554.656164] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.656164] env[61972]: _ensure_no_port_binding_failure(port) [ 554.656164] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 554.656164] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 554.658009] env[61972]: nova.exception.PortBindingFailed: Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. [ 554.658009] env[61972]: Removing descriptor: 19 [ 554.658009] env[61972]: ERROR nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. 
[ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] Traceback (most recent call last): [ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] yield resources [ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self.driver.spawn(context, instance, image_meta, [ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self._vmops.spawn(context, instance, image_meta, injected_files, [ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 554.658009] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] vm_ref = self.build_virtual_machine(instance, [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] vif_infos = vmwarevif.get_vif_info(self._session, [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] for vif in network_info: [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] return self._sync_wrapper(fn, *args, **kwargs) [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self.wait() [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self[:] = self._gt.wait() [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] return self._exit_event.wait() [ 554.659314] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 554.660334] env[61972]: ERROR 
nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] result = hub.switch() [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] return self.greenlet.switch() [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] result = function(*args, **kwargs) [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] return func(*args, **kwargs) [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] raise e [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] nwinfo = self.network_api.allocate_for_instance( [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 554.660334] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] created_port_ids = self._update_ports_for_instance( [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] with excutils.save_and_reraise_exception(): [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self.force_reraise() [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] raise self.value [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] updated_port = self._update_port( [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.664116] 
env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] _ensure_no_port_binding_failure(port) [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 554.664116] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] raise exception.PortBindingFailed(port_id=port['id']) [ 554.664608] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] nova.exception.PortBindingFailed: Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. [ 554.664608] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] [ 554.664608] env[61972]: INFO nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Terminating instance [ 554.805695] env[61972]: DEBUG nova.compute.manager [req-82aafa56-da47-4804-96c9-977dbca7a1ef req-9e224f09-081d-452d-a139-24e9bd982569 service nova] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Received event network-vif-deleted-00686c4d-cd65-4d4c-b7ea-3bfa3387ed64 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 554.840607] env[61972]: DEBUG nova.compute.manager [req-7d05eaca-b807-4b04-9c17-f14ef162a712 req-5cf858c0-cc3f-4d8d-9569-3ac89d06f846 service nova] [instance: d32a7937-792a-4959-bded-819463472399] Received event network-changed-a020e457-148f-436e-8d92-e4822ddc9e60 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 554.840846] env[61972]: DEBUG nova.compute.manager [req-7d05eaca-b807-4b04-9c17-f14ef162a712 req-5cf858c0-cc3f-4d8d-9569-3ac89d06f846 service nova] [instance: d32a7937-792a-4959-bded-819463472399] Refreshing instance network info cache due to event network-changed-a020e457-148f-436e-8d92-e4822ddc9e60. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 554.841083] env[61972]: DEBUG oslo_concurrency.lockutils [req-7d05eaca-b807-4b04-9c17-f14ef162a712 req-5cf858c0-cc3f-4d8d-9569-3ac89d06f846 service nova] Acquiring lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.842503] env[61972]: DEBUG oslo_concurrency.lockutils [req-7d05eaca-b807-4b04-9c17-f14ef162a712 req-5cf858c0-cc3f-4d8d-9569-3ac89d06f846 service nova] Acquired lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.842503] env[61972]: DEBUG nova.network.neutron [req-7d05eaca-b807-4b04-9c17-f14ef162a712 req-5cf858c0-cc3f-4d8d-9569-3ac89d06f846 service nova] [instance: d32a7937-792a-4959-bded-819463472399] Refreshing network info cache for port a020e457-148f-436e-8d92-e4822ddc9e60 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 554.967348] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Successfully created port: 2473edfc-d94b-4017-8570-fd43fee873db {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 555.103871] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 24073cc5-cccd-4a1b-87d6-a8a6458251f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.161838] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.184554] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.416242] env[61972]: DEBUG nova.network.neutron [req-7d05eaca-b807-4b04-9c17-f14ef162a712 req-5cf858c0-cc3f-4d8d-9569-3ac89d06f846 service nova] [instance: d32a7937-792a-4959-bded-819463472399] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 555.607539] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 9e258f66-df7b-4acf-a066-ba66958a7861 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.723927] env[61972]: DEBUG nova.network.neutron [req-7d05eaca-b807-4b04-9c17-f14ef162a712 req-5cf858c0-cc3f-4d8d-9569-3ac89d06f846 service nova] [instance: d32a7937-792a-4959-bded-819463472399] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.114549] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 107b6153-65ad-48e4-9810-113bfacdd3d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.228273] env[61972]: DEBUG oslo_concurrency.lockutils [req-7d05eaca-b807-4b04-9c17-f14ef162a712 req-5cf858c0-cc3f-4d8d-9569-3ac89d06f846 service nova] Releasing lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.229027] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.229269] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 556.618861] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 2795b001-aaf2-4886-bba7-bd764c29638c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.770416] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.125587] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 6858305a-6ab4-401d-ad1f-e6d21117d9e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.145470] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.634431] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance ba985ad1-390d-4a2e-ad96-c273231f8549 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.654023] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.654023] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 557.654023] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 557.654023] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5fc264d-aeb1-4cca-a936-01064ad3480d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.672845] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487deb86-a93d-4408-8fd2-b23b0e3e829b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.703278] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d32a7937-792a-4959-bded-819463472399 could not be found. 
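The entries above show the destroy path tolerating a VM that is already gone from vCenter: SearchIndex.FindAllByUuid finds nothing, vmops logs the InstanceNotFound as a warning, and teardown continues as if the guest had been destroyed. A minimal sketch of that tolerate-missing pattern, with hypothetical names (find_vm_by_uuid, BackendInstanceNotFound) standing in for the real vmops and oslo.vmware calls:

```python
import logging

LOG = logging.getLogger(__name__)


class BackendInstanceNotFound(Exception):
    """The hypervisor has no VM for the given instance UUID (hypothetical stand-in)."""


def find_vm_by_uuid(session, instance_uuid):
    # Placeholder for the vCenter lookup (SearchIndex.FindAllByUuid in the log);
    # here it always reports the VM as missing so the warning path is exercised.
    raise BackendInstanceNotFound(instance_uuid)


def destroy_instance(session, instance_uuid):
    """Destroy a VM, treating 'already gone on the backend' as success."""
    try:
        vm_ref = find_vm_by_uuid(session, instance_uuid)
    except BackendInstanceNotFound:
        # Mirrors the WARNING above: nothing to tear down, so report success
        # and let the compute manager continue with network/volume cleanup.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        return
    LOG.info("Instance destroyed: %s", vm_ref)  # unregister/delete would happen here


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    destroy_instance(None, "d32a7937-792a-4959-bded-819463472399")
```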
[ 557.703527] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 557.703706] env[61972]: INFO nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Took 0.05 seconds to destroy the instance on the hypervisor. [ 557.703945] env[61972]: DEBUG oslo.service.loopingcall [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.705281] env[61972]: DEBUG nova.compute.manager [-] [instance: d32a7937-792a-4959-bded-819463472399] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 557.705381] env[61972]: DEBUG nova.network.neutron [-] [instance: d32a7937-792a-4959-bded-819463472399] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 557.768197] env[61972]: DEBUG nova.network.neutron [-] [instance: d32a7937-792a-4959-bded-819463472399] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.978252] env[61972]: DEBUG nova.compute.manager [req-1f10d0d9-e91c-4768-9635-3effdb9f4a95 req-3025f83c-5dd4-48e1-a429-ce5e5364985e service nova] [instance: d32a7937-792a-4959-bded-819463472399] Received event network-vif-deleted-a020e457-148f-436e-8d92-e4822ddc9e60 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 558.138549] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.278636] env[61972]: DEBUG nova.network.neutron [-] [instance: d32a7937-792a-4959-bded-819463472399] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.647785] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 56b1ea80-3109-4212-959b-0e5fb2fc66d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.647785] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 558.647785] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 558.787317] env[61972]: INFO nova.compute.manager [-] [instance: d32a7937-792a-4959-bded-819463472399] Took 1.08 seconds to deallocate network for instance. [ 558.791286] env[61972]: DEBUG nova.compute.claims [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 558.791647] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.918036] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8c00a4-2822-4f0b-a4a0-e78ef5d321c8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.925650] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177fa7fd-3196-4dfd-8ba1-5456f3cb572f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.956528] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b65ab51-4233-4845-acc1-a18f3f7d8a8d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.967019] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb340ee-1bc2-4712-a1ae-614e0d5de0cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.977958] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.481568] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 559.843061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Acquiring lock "489fc6c6-c9a5-40a8-81a4-7677f55743fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.843061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Lock "489fc6c6-c9a5-40a8-81a4-7677f55743fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.994428] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 559.994657] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.462s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.995431] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.803s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.996796] env[61972]: INFO nova.compute.claims [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 560.092494] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "cf7ea49c-91ff-4c81-803c-90608c2849dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.092984] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "cf7ea49c-91ff-4c81-803c-90608c2849dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.009121] env[61972]: DEBUG nova.compute.manager 
[req-b3be629a-3159-476f-ac82-0bec2e0678a4 req-a2661bbe-673c-4518-97af-6deb780707df service nova] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Received event network-changed-2473edfc-d94b-4017-8570-fd43fee873db {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 561.009396] env[61972]: DEBUG nova.compute.manager [req-b3be629a-3159-476f-ac82-0bec2e0678a4 req-a2661bbe-673c-4518-97af-6deb780707df service nova] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Refreshing instance network info cache due to event network-changed-2473edfc-d94b-4017-8570-fd43fee873db. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 561.009565] env[61972]: DEBUG oslo_concurrency.lockutils [req-b3be629a-3159-476f-ac82-0bec2e0678a4 req-a2661bbe-673c-4518-97af-6deb780707df service nova] Acquiring lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.009670] env[61972]: DEBUG oslo_concurrency.lockutils [req-b3be629a-3159-476f-ac82-0bec2e0678a4 req-a2661bbe-673c-4518-97af-6deb780707df service nova] Acquired lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.009826] env[61972]: DEBUG nova.network.neutron [req-b3be629a-3159-476f-ac82-0bec2e0678a4 req-a2661bbe-673c-4518-97af-6deb780707df service nova] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Refreshing network info cache for port 2473edfc-d94b-4017-8570-fd43fee873db {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 561.348074] env[61972]: ERROR nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. 
[ 561.348074] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 561.348074] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 561.348074] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 561.348074] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 561.348074] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 561.348074] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 561.348074] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 561.348074] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.348074] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 561.348074] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.348074] env[61972]: ERROR nova.compute.manager raise self.value [ 561.348074] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 561.348074] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 561.348074] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.348074] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 561.348923] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.348923] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 561.348923] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. 
[ 561.348923] env[61972]: ERROR nova.compute.manager [ 561.349976] env[61972]: Traceback (most recent call last): [ 561.350097] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 561.350097] env[61972]: listener.cb(fileno) [ 561.350097] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.350097] env[61972]: result = function(*args, **kwargs) [ 561.350097] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 561.350097] env[61972]: return func(*args, **kwargs) [ 561.350097] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 561.350097] env[61972]: raise e [ 561.350353] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 561.350353] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 561.350353] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 561.350353] env[61972]: created_port_ids = self._update_ports_for_instance( [ 561.350353] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 561.350353] env[61972]: with excutils.save_and_reraise_exception(): [ 561.350353] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.350353] env[61972]: self.force_reraise() [ 561.350353] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.350353] env[61972]: raise self.value [ 561.350353] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 561.350353] env[61972]: updated_port = self._update_port( [ 561.350353] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.350353] env[61972]: _ensure_no_port_binding_failure(port) [ 561.350353] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.350353] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 561.350353] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. [ 561.350353] env[61972]: Removing descriptor: 15 [ 561.351548] env[61972]: ERROR nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. 
[ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Traceback (most recent call last): [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] yield resources [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self.driver.spawn(context, instance, image_meta, [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] vm_ref = self.build_virtual_machine(instance, [ 561.351548] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] for vif in network_info: [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] return self._sync_wrapper(fn, *args, **kwargs) [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self.wait() [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self[:] = self._gt.wait() [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] return self._exit_event.wait() [ 561.351949] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.351949] env[61972]: ERROR 
nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] result = hub.switch() [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] return self.greenlet.switch() [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] result = function(*args, **kwargs) [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] return func(*args, **kwargs) [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] raise e [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] nwinfo = self.network_api.allocate_for_instance( [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] created_port_ids = self._update_ports_for_instance( [ 561.352366] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] with excutils.save_and_reraise_exception(): [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self.force_reraise() [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] raise self.value [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] updated_port = self._update_port( [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.353177] 
env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] _ensure_no_port_binding_failure(port) [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] raise exception.PortBindingFailed(port_id=port['id']) [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] nova.exception.PortBindingFailed: Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. [ 561.353177] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] [ 561.353578] env[61972]: INFO nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Terminating instance [ 561.356484] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bc972d-24c6-48d1-b1e3-8f7873a8a18f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.367018] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5483c2-6d88-447a-93ed-8cddee44bcc4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.400667] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396a5d2f-ec39-43f0-ae3c-efd2ca3d8479 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.408513] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d0a2bf-a894-4d95-8ba0-d91bf3a91d46 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.422877] env[61972]: DEBUG nova.compute.provider_tree [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.542639] env[61972]: DEBUG nova.network.neutron [req-b3be629a-3159-476f-ac82-0bec2e0678a4 req-a2661bbe-673c-4518-97af-6deb780707df service nova] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 561.748406] env[61972]: DEBUG nova.network.neutron [req-b3be629a-3159-476f-ac82-0bec2e0678a4 req-a2661bbe-673c-4518-97af-6deb780707df service nova] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.863180] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.926766] env[61972]: DEBUG nova.scheduler.client.report [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 562.251659] env[61972]: DEBUG oslo_concurrency.lockutils [req-b3be629a-3159-476f-ac82-0bec2e0678a4 req-a2661bbe-673c-4518-97af-6deb780707df service nova] Releasing lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.252972] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquired lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.252972] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 562.430528] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.432806] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 562.439157] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.325s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.442336] env[61972]: INFO nova.compute.claims [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 562.813918] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 562.953423] env[61972]: DEBUG nova.compute.utils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 562.961264] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 562.961497] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 563.124737] env[61972]: DEBUG nova.policy [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8169f662eb7c45129939ab888ea28b8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49cd9c78e79746669c36b36e4ede0ed3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 563.150188] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.471344] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 563.656415] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Releasing lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.659916] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 563.659916] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 563.659916] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48f765a4-d3e3-45df-a819-60fd8c50b261 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.668920] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295ad8d7-00c3-4c85-bc51-f7cdd0d6449f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.701164] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Acquiring lock "81775c2c-328e-4e33-8ff7-40a9f638ec76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.702177] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Lock "81775c2c-328e-4e33-8ff7-40a9f638ec76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.713716] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fb28710d-cd15-41d4-b7aa-8389093ea9a8 could not be found. [ 563.713843] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 563.714028] env[61972]: INFO nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Took 0.06 seconds to destroy the instance on the hypervisor. [ 563.714618] env[61972]: DEBUG oslo.service.loopingcall [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 563.714618] env[61972]: DEBUG nova.compute.manager [-] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 563.714618] env[61972]: DEBUG nova.network.neutron [-] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 563.763959] env[61972]: DEBUG nova.network.neutron [-] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.836260] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc0a635-3da9-4be7-a652-ba838347eac0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.845870] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3614718-5179-4faf-92ae-ff048d729fee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.885169] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28852a5-a2f4-4977-addf-3056aceb3cc8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.893256] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca63c86-32d0-4737-b0fb-86eb7cc0d787 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.907541] env[61972]: DEBUG nova.compute.provider_tree [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.027387] env[61972]: DEBUG nova.compute.manager [req-3055c98c-757f-437d-8811-3261a4dd755c req-9713ce85-74a3-4870-bf25-6daf1847aff8 service nova] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Received event network-vif-deleted-2473edfc-d94b-4017-8570-fd43fee873db {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 564.272540] env[61972]: DEBUG nova.network.neutron [-] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.410945] env[61972]: DEBUG nova.scheduler.client.report [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 564.488135] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 564.525287] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 564.525287] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 564.525287] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.525524] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 564.525524] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.525662] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 564.525863] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 564.526015] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 564.526822] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 564.527017] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 564.527198] env[61972]: DEBUG nova.virt.hardware [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 564.528071] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bb139f-9c6e-4f0a-b795-26a2f1ed468e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.539723] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b42ac1b-a815-429a-a388-86370189fd35 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.775792] env[61972]: INFO nova.compute.manager [-] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Took 1.06 seconds to deallocate network for instance. 
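The PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure, which raises once Neutron reports the port's binding as failed; the spawn is then aborted and the instance's network deallocated. A rough, self-contained approximation of that final check, assuming (as Neutron conventionally signals it) that a failed binding shows up as binding:vif_type == 'binding_failed'; the exception class below is a simplified stand-in for nova.exception.PortBindingFailed, not the real one:

```python
class PortBindingFailed(Exception):
    """Simplified stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")
        self.port_id = port_id


VIF_TYPE_BINDING_FAILED = "binding_failed"  # assumed Neutron convention for a failed binding


def ensure_no_port_binding_failure(port):
    """Raise if the Neutron port dict reports its binding as failed."""
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])


# A port shaped like the one in the traceback, with a rejected binding.
failed_port = {
    "id": "2473edfc-d94b-4017-8570-fd43fee873db",
    "binding:vif_type": VIF_TYPE_BINDING_FAILED,
}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)
```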
[ 564.779458] env[61972]: DEBUG nova.compute.claims [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 564.779614] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.917800] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.918952] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 564.927138] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.405s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.927138] env[61972]: INFO nova.compute.claims [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.953209] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Successfully created port: 2079a444-eddf-4901-9beb-69ec949ef2ce {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 565.434144] env[61972]: DEBUG nova.compute.utils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 565.435547] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 565.437297] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 565.716323] env[61972]: DEBUG nova.policy [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db90c676212d4718aeca3d6a81056d00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8705142b4c6c452e9cea12bcb31e11fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 565.941089] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 566.138764] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Acquiring lock "aad5f67a-ad8f-4d0d-977c-1e65ada7682a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.138996] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Lock "aad5f67a-ad8f-4d0d-977c-1e65ada7682a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.243623] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "98905e39-fda3-47a8-867d-130d76894e53" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.244363] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "98905e39-fda3-47a8-867d-130d76894e53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.293565] env[61972]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afe89db-2ddf-43db-a8b3-82e2989cd3f4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.301182] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40cb000-00c6-4afc-ace7-26e9df61898e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.339058] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facebf16-82ff-4933-b03d-8c3b22b723cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.346511] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f495fb-1b42-4c85-b955-2a6aab4d9e58 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.359897] env[61972]: DEBUG nova.compute.provider_tree [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.387440] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Successfully created port: 07b03cac-6b8d-49a6-ab08-029bb9658a4e {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 566.527054] env[61972]: DEBUG nova.compute.manager [None req-fb107a40-ff6b-4c03-a0f4-1da979d4f0bc tempest-ServerDiagnosticsV248Test-870806284 tempest-ServerDiagnosticsV248Test-870806284-project-admin] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 566.529548] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbee1bbf-44c6-4e7c-a04c-08a9b29b5ee5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.537303] env[61972]: INFO nova.compute.manager [None req-fb107a40-ff6b-4c03-a0f4-1da979d4f0bc tempest-ServerDiagnosticsV248Test-870806284 tempest-ServerDiagnosticsV248Test-870806284-project-admin] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Retrieving diagnostics [ 566.538111] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825ffe4a-7e6c-4d3a-bcdc-1579732de25d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.867211] env[61972]: DEBUG nova.scheduler.client.report [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 566.966081] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 567.000907] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.001075] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.001230] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.001408] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.001566] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.001724] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.002201] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 
tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.002710] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.002922] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.005197] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.005197] env[61972]: DEBUG nova.virt.hardware [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.005197] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75354e6b-a541-4be2-970e-ce078ce815a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.016570] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7762fe4e-dc4b-4b7f-914b-65b1bd9bb8a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.374964] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.374964] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 567.377060] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.083s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.378470] env[61972]: INFO nova.compute.claims [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 567.712483] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "a48c6a10-0c00-40f8-831f-713213390dfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.712483] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "a48c6a10-0c00-40f8-831f-713213390dfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.886240] env[61972]: DEBUG nova.compute.utils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.891818] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 567.895309] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 568.015943] env[61972]: DEBUG nova.policy [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbb84a5dee4944aab7645d46ca68def3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6341aef2a0b14302a186b1c5c05ecb22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 568.190809] env[61972]: ERROR nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. [ 568.190809] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 568.190809] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 568.190809] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 568.190809] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 568.190809] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 568.190809] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 568.190809] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 568.190809] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.190809] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 568.190809] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.190809] env[61972]: ERROR nova.compute.manager raise self.value [ 568.190809] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 568.190809] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 568.190809] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.190809] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 568.192391] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 568.192391] env[61972]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 568.192391] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. [ 568.192391] env[61972]: ERROR nova.compute.manager [ 568.192391] env[61972]: Traceback (most recent call last): [ 568.192391] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 568.192391] env[61972]: listener.cb(fileno) [ 568.192391] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 568.192391] env[61972]: result = function(*args, **kwargs) [ 568.192391] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 568.192391] env[61972]: return func(*args, **kwargs) [ 568.192391] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 568.192391] env[61972]: raise e [ 568.192391] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 568.192391] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 568.192391] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 568.192391] env[61972]: created_port_ids = self._update_ports_for_instance( [ 568.192391] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 568.192391] env[61972]: with excutils.save_and_reraise_exception(): [ 568.192391] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.192391] env[61972]: self.force_reraise() [ 568.192391] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.192391] env[61972]: raise self.value [ 568.192391] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 568.192391] env[61972]: updated_port = self._update_port( [ 568.192391] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.192391] env[61972]: _ensure_no_port_binding_failure(port) [ 568.192391] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 568.192391] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 568.194212] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. [ 568.194212] env[61972]: Removing descriptor: 15 [ 568.194212] env[61972]: ERROR nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. 
[ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Traceback (most recent call last): [ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] yield resources [ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self.driver.spawn(context, instance, image_meta, [ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 568.194212] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] vm_ref = self.build_virtual_machine(instance, [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] for vif in network_info: [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] return self._sync_wrapper(fn, *args, **kwargs) [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self.wait() [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self[:] = self._gt.wait() [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] return self._exit_event.wait() [ 568.194720] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 568.195454] env[61972]: ERROR 
nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] result = hub.switch() [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] return self.greenlet.switch() [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] result = function(*args, **kwargs) [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] return func(*args, **kwargs) [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] raise e [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] nwinfo = self.network_api.allocate_for_instance( [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 568.195454] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] created_port_ids = self._update_ports_for_instance( [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] with excutils.save_and_reraise_exception(): [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self.force_reraise() [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] raise self.value [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] updated_port = self._update_port( [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.195843] 
env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] _ensure_no_port_binding_failure(port) [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 568.195843] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] raise exception.PortBindingFailed(port_id=port['id']) [ 568.196214] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] nova.exception.PortBindingFailed: Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. [ 568.196214] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] [ 568.196214] env[61972]: INFO nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Terminating instance [ 568.203256] env[61972]: ERROR nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. [ 568.203256] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 568.203256] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 568.203256] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 568.203256] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 568.203256] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 568.203256] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 568.203256] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 568.203256] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.203256] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 568.203256] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.203256] env[61972]: ERROR nova.compute.manager raise self.value [ 568.203256] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 568.203256] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 568.203256] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.203256] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 568.203775] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 568.203775] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 568.203775] env[61972]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. [ 568.203775] env[61972]: ERROR nova.compute.manager [ 568.203775] env[61972]: Traceback (most recent call last): [ 568.203775] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 568.203775] env[61972]: listener.cb(fileno) [ 568.203775] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 568.203775] env[61972]: result = function(*args, **kwargs) [ 568.203775] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 568.203775] env[61972]: return func(*args, **kwargs) [ 568.203775] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 568.203775] env[61972]: raise e [ 568.203775] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 568.203775] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 568.203775] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 568.203775] env[61972]: created_port_ids = self._update_ports_for_instance( [ 568.203775] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 568.203775] env[61972]: with excutils.save_and_reraise_exception(): [ 568.203775] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.203775] env[61972]: self.force_reraise() [ 568.203775] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.203775] env[61972]: raise self.value [ 568.203775] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 568.203775] env[61972]: updated_port = self._update_port( [ 568.203775] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.203775] env[61972]: _ensure_no_port_binding_failure(port) [ 568.203775] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 568.203775] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 568.204802] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. [ 568.204802] env[61972]: Removing descriptor: 19 [ 568.204802] env[61972]: ERROR nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. 
[ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Traceback (most recent call last): [ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] yield resources [ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self.driver.spawn(context, instance, image_meta, [ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 568.204802] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] vm_ref = self.build_virtual_machine(instance, [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] vif_infos = vmwarevif.get_vif_info(self._session, [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] for vif in network_info: [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] return self._sync_wrapper(fn, *args, **kwargs) [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self.wait() [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self[:] = self._gt.wait() [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] return self._exit_event.wait() [ 568.205204] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 568.205603] env[61972]: ERROR 
nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] result = hub.switch() [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] return self.greenlet.switch() [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] result = function(*args, **kwargs) [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] return func(*args, **kwargs) [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] raise e [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] nwinfo = self.network_api.allocate_for_instance( [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 568.205603] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] created_port_ids = self._update_ports_for_instance( [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] with excutils.save_and_reraise_exception(): [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self.force_reraise() [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] raise self.value [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] updated_port = self._update_port( [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.205963] 
env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] _ensure_no_port_binding_failure(port) [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 568.205963] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] raise exception.PortBindingFailed(port_id=port['id']) [ 568.206306] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] nova.exception.PortBindingFailed: Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. [ 568.206306] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] [ 568.206306] env[61972]: INFO nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Terminating instance [ 568.392826] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 568.700743] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Acquiring lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.701190] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Acquired lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.702080] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 568.709177] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Acquiring lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.709431] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Acquired lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.709634] env[61972]: DEBUG nova.network.neutron [None 
req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 568.760398] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Successfully created port: 47ef9d15-a729-4946-8b82-f4e24ae804d9 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 568.837342] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b33a38c-3f68-4b14-a4be-f577842d83c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.845698] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d940d9-67b9-4054-8a01-5002e159efb3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.884779] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35418c91-fde5-48c1-9ad0-d50e7c39b612 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.893755] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ae7a75-196f-46b9-8344-3f5a099d61d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.913467] env[61972]: DEBUG nova.compute.provider_tree [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.067114] env[61972]: DEBUG nova.compute.manager [req-8c8799bd-e319-4957-b736-2d892cd10c4d req-2bdd918e-a2b7-4dff-8f8e-909daaa3b9b2 service nova] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Received event network-changed-2079a444-eddf-4901-9beb-69ec949ef2ce {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 569.067114] env[61972]: DEBUG nova.compute.manager [req-8c8799bd-e319-4957-b736-2d892cd10c4d req-2bdd918e-a2b7-4dff-8f8e-909daaa3b9b2 service nova] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Refreshing instance network info cache due to event network-changed-2079a444-eddf-4901-9beb-69ec949ef2ce. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 569.067114] env[61972]: DEBUG oslo_concurrency.lockutils [req-8c8799bd-e319-4957-b736-2d892cd10c4d req-2bdd918e-a2b7-4dff-8f8e-909daaa3b9b2 service nova] Acquiring lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.089749] env[61972]: DEBUG nova.compute.manager [req-5c174ca0-72b4-40bf-be89-d62320e855f1 req-90f15cd2-846f-4ab9-bff8-547d5482461f service nova] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Received event network-changed-07b03cac-6b8d-49a6-ab08-029bb9658a4e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 569.090140] env[61972]: DEBUG nova.compute.manager [req-5c174ca0-72b4-40bf-be89-d62320e855f1 req-90f15cd2-846f-4ab9-bff8-547d5482461f service nova] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Refreshing instance network info cache due to event network-changed-07b03cac-6b8d-49a6-ab08-029bb9658a4e. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 569.090532] env[61972]: DEBUG oslo_concurrency.lockutils [req-5c174ca0-72b4-40bf-be89-d62320e855f1 req-90f15cd2-846f-4ab9-bff8-547d5482461f service nova] Acquiring lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.231234] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.233556] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.324827] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Acquiring lock "3f4a6562-4c28-479c-8665-fb61c2d64dae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.325084] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Lock "3f4a6562-4c28-479c-8665-fb61c2d64dae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.328856] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "b8e485a2-3c56-4871-be93-59359e465cd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.329090] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "b8e485a2-3c56-4871-be93-59359e465cd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.357401] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquiring lock "72d434a7-ea70-4594-971f-7eec8ebea153" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.357401] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "72d434a7-ea70-4594-971f-7eec8ebea153" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.357401] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquiring lock "72d434a7-ea70-4594-971f-7eec8ebea153-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.357401] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 
tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "72d434a7-ea70-4594-971f-7eec8ebea153-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.357655] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "72d434a7-ea70-4594-971f-7eec8ebea153-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.359036] env[61972]: INFO nova.compute.manager [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Terminating instance [ 569.368171] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.389830] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.403637] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 569.416680] env[61972]: DEBUG nova.scheduler.client.report [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 569.442865] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.444844] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.444844] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.444844] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 569.444844] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.444844] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 569.445221] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.445221] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 569.445221] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.445221] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.445221] env[61972]: DEBUG nova.virt.hardware [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.446180] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e065aa-919a-4f6f-955c-49faad9fab3c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.455364] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6df595-0e6b-496a-ba58-717a5c5c42b2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.863619] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquiring lock "refresh_cache-72d434a7-ea70-4594-971f-7eec8ebea153" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.863817] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquired lock "refresh_cache-72d434a7-ea70-4594-971f-7eec8ebea153" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.863993] env[61972]: DEBUG nova.network.neutron [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
569.872271] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Releasing lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.872271] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 569.872271] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 569.872271] env[61972]: DEBUG oslo_concurrency.lockutils [req-8c8799bd-e319-4957-b736-2d892cd10c4d req-2bdd918e-a2b7-4dff-8f8e-909daaa3b9b2 service nova] Acquired lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.872271] env[61972]: DEBUG nova.network.neutron [req-8c8799bd-e319-4957-b736-2d892cd10c4d req-2bdd918e-a2b7-4dff-8f8e-909daaa3b9b2 service nova] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Refreshing network info cache for port 2079a444-eddf-4901-9beb-69ec949ef2ce {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 569.872539] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e817037-88de-44b7-8744-48d3191b3668 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.883257] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6199a344-b61d-4f28-922d-6c6786315ee4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.898438] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Releasing lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.898925] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 569.899374] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 569.900137] env[61972]: DEBUG oslo_concurrency.lockutils [req-5c174ca0-72b4-40bf-be89-d62320e855f1 req-90f15cd2-846f-4ab9-bff8-547d5482461f service nova] Acquired lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.900369] env[61972]: DEBUG nova.network.neutron [req-5c174ca0-72b4-40bf-be89-d62320e855f1 req-90f15cd2-846f-4ab9-bff8-547d5482461f service nova] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Refreshing network info cache for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 569.903059] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f66f5d04-1274-4904-aa64-5784dfc63b35 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.922154] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981c1853-29cf-49b9-b055-30ff3ab5a8e6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.934757] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7a7c98db-6ed4-4908-adc8-53347d693dca could not be found. [ 569.935068] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 569.935297] env[61972]: INFO nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Took 0.06 seconds to destroy the instance on the hypervisor. [ 569.935587] env[61972]: DEBUG oslo.service.loopingcall [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 569.937127] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.937127] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 569.943034] env[61972]: DEBUG nova.compute.manager [-] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 569.943034] env[61972]: DEBUG nova.network.neutron [-] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 569.943034] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.572s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.943841] env[61972]: INFO nova.compute.claims [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.960177] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 24073cc5-cccd-4a1b-87d6-a8a6458251f9 could not be found. [ 569.960643] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 569.961028] env[61972]: INFO nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Took 0.06 seconds to destroy the instance on the hypervisor. [ 569.961403] env[61972]: DEBUG oslo.service.loopingcall [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 569.961776] env[61972]: DEBUG nova.compute.manager [-] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 569.962141] env[61972]: DEBUG nova.network.neutron [-] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 569.973212] env[61972]: DEBUG nova.network.neutron [-] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.992059] env[61972]: DEBUG nova.network.neutron [-] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.063240] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "3d32ec82-e623-4bbb-93c2-d39c934b4890" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.063472] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "3d32ec82-e623-4bbb-93c2-d39c934b4890" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.391783] env[61972]: DEBUG nova.network.neutron [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.401112] env[61972]: DEBUG nova.network.neutron [req-8c8799bd-e319-4957-b736-2d892cd10c4d req-2bdd918e-a2b7-4dff-8f8e-909daaa3b9b2 service nova] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.436534] env[61972]: DEBUG nova.network.neutron [req-5c174ca0-72b4-40bf-be89-d62320e855f1 req-90f15cd2-846f-4ab9-bff8-547d5482461f service nova] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.443385] env[61972]: DEBUG nova.compute.utils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 570.445479] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 570.445699] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 570.481187] env[61972]: DEBUG nova.network.neutron [-] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.496738] env[61972]: DEBUG nova.network.neutron [-] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.559089] env[61972]: ERROR nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. 
[ 570.559089] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 570.559089] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 570.559089] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 570.559089] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.559089] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 570.559089] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.559089] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 570.559089] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.559089] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 570.559089] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.559089] env[61972]: ERROR nova.compute.manager raise self.value [ 570.559089] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.559089] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 570.559089] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.559089] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 570.560394] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.560394] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 570.560394] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. 
[ 570.560394] env[61972]: ERROR nova.compute.manager [ 570.560394] env[61972]: Traceback (most recent call last): [ 570.560394] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 570.560394] env[61972]: listener.cb(fileno) [ 570.560394] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.560394] env[61972]: result = function(*args, **kwargs) [ 570.560394] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 570.560394] env[61972]: return func(*args, **kwargs) [ 570.560394] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 570.560394] env[61972]: raise e [ 570.560394] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 570.560394] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 570.560394] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.560394] env[61972]: created_port_ids = self._update_ports_for_instance( [ 570.560394] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.560394] env[61972]: with excutils.save_and_reraise_exception(): [ 570.560394] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.560394] env[61972]: self.force_reraise() [ 570.560394] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.560394] env[61972]: raise self.value [ 570.560394] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.560394] env[61972]: updated_port = self._update_port( [ 570.560394] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.560394] env[61972]: _ensure_no_port_binding_failure(port) [ 570.560394] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.560394] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 570.561651] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. [ 570.561651] env[61972]: Removing descriptor: 17 [ 570.561651] env[61972]: ERROR nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. 
[ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Traceback (most recent call last): [ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] yield resources [ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self.driver.spawn(context, instance, image_meta, [ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self._vmops.spawn(context, instance, image_meta, injected_files, [ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 570.561651] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] vm_ref = self.build_virtual_machine(instance, [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] vif_infos = vmwarevif.get_vif_info(self._session, [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] for vif in network_info: [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] return self._sync_wrapper(fn, *args, **kwargs) [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self.wait() [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self[:] = self._gt.wait() [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] return self._exit_event.wait() [ 570.562323] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 570.562736] env[61972]: ERROR 
nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] result = hub.switch() [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] return self.greenlet.switch() [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] result = function(*args, **kwargs) [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] return func(*args, **kwargs) [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] raise e [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] nwinfo = self.network_api.allocate_for_instance( [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.562736] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] created_port_ids = self._update_ports_for_instance( [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] with excutils.save_and_reraise_exception(): [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self.force_reraise() [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] raise self.value [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] updated_port = self._update_port( [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.563117] 
env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] _ensure_no_port_binding_failure(port) [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 570.563117] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] raise exception.PortBindingFailed(port_id=port['id']) [ 570.563495] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] nova.exception.PortBindingFailed: Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. [ 570.563495] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] [ 570.563495] env[61972]: INFO nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Terminating instance [ 570.565030] env[61972]: DEBUG nova.network.neutron [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.632585] env[61972]: DEBUG nova.network.neutron [req-5c174ca0-72b4-40bf-be89-d62320e855f1 req-90f15cd2-846f-4ab9-bff8-547d5482461f service nova] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.753857] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.754139] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.780184] env[61972]: DEBUG nova.policy [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13a2aee7a1b34c4092c2a52cb7683528', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05f1f715efc54b2abc89921fd29b2ca0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 570.784587] 
env[61972]: DEBUG nova.network.neutron [req-8c8799bd-e319-4957-b736-2d892cd10c4d req-2bdd918e-a2b7-4dff-8f8e-909daaa3b9b2 service nova] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.949440] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 570.989405] env[61972]: INFO nova.compute.manager [-] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Took 1.05 seconds to deallocate network for instance. [ 570.996311] env[61972]: DEBUG nova.compute.claims [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 570.996311] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.000124] env[61972]: INFO nova.compute.manager [-] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Took 1.04 seconds to deallocate network for instance. [ 571.002819] env[61972]: DEBUG nova.compute.claims [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 571.002990] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.070301] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Releasing lock "refresh_cache-72d434a7-ea70-4594-971f-7eec8ebea153" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.070872] env[61972]: DEBUG nova.compute.manager [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 571.071432] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 571.072177] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2caa5f0b-fb04-4dd7-b004-3c483893503c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.077621] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Acquiring lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.077779] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Acquired lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.078012] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 571.087391] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 571.087391] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d96481a3-20a8-494b-81d6-fa77d12c97d6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.092415] env[61972]: DEBUG oslo_vmware.api [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 571.092415] env[61972]: value = "task-1389066" [ 571.092415] env[61972]: _type = "Task" [ 571.092415] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.105413] env[61972]: DEBUG oslo_vmware.api [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389066, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.137480] env[61972]: DEBUG oslo_concurrency.lockutils [req-5c174ca0-72b4-40bf-be89-d62320e855f1 req-90f15cd2-846f-4ab9-bff8-547d5482461f service nova] Releasing lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.287056] env[61972]: DEBUG oslo_concurrency.lockutils [req-8c8799bd-e319-4957-b736-2d892cd10c4d req-2bdd918e-a2b7-4dff-8f8e-909daaa3b9b2 service nova] Releasing lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.323889] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Successfully created port: 7ef17580-83f5-4048-999d-d75acd9e4f06 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 571.376044] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6016639-57a8-4db0-87e6-e937cdb832f0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.387992] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50150246-1e0c-49ed-ab89-410c80ebd648 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.423654] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8249f93d-ed9c-4544-ac63-1d01e69be508 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.432359] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e510b0dd-fa10-4f12-aa06-21ca725c6ea7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.448029] env[61972]: DEBUG nova.compute.provider_tree [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.604735] env[61972]: DEBUG oslo_vmware.api [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389066, 'name': PowerOffVM_Task, 'duration_secs': 0.133311} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.604990] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 571.605281] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 571.605408] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c339c16e-d9a9-4c17-bff6-10f96e78428b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.627365] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.633595] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 571.633824] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 571.634071] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Deleting the datastore file [datastore2] 72d434a7-ea70-4594-971f-7eec8ebea153 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 571.634786] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e62edf72-2665-460b-81f3-0c627c4d8888 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.642023] env[61972]: DEBUG oslo_vmware.api [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for the task: (returnval){ [ 571.642023] env[61972]: value = "task-1389068" [ 571.642023] env[61972]: _type = "Task" [ 571.642023] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.651878] env[61972]: DEBUG oslo_vmware.api [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389068, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.768018] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.950804] env[61972]: DEBUG nova.scheduler.client.report [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 571.959488] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Acquiring lock "5aba271f-72bb-4847-8c87-18adda584a74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.959792] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Lock "5aba271f-72bb-4847-8c87-18adda584a74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.965266] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 571.995900] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.996290] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.996371] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.998350] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.998350] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.998464] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.998648] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.998796] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 571.998954] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.999130] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 571.999297] env[61972]: DEBUG nova.virt.hardware [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 572.000483] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3dd956-2883-4f76-9b58-789ed1c2c47a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.009642] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d197778-d4b6-4e4f-bb33-fade997d7842 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.029918] env[61972]: DEBUG nova.compute.manager [req-7497a9e4-b04b-4b7d-be94-c7ce6c88db84 req-9f1a384a-e3b4-48aa-896b-d37b1f3d7d44 service nova] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Received event network-vif-deleted-2079a444-eddf-4901-9beb-69ec949ef2ce {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 572.086463] env[61972]: DEBUG nova.compute.manager [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Received event network-vif-deleted-07b03cac-6b8d-49a6-ab08-029bb9658a4e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 572.086599] env[61972]: DEBUG nova.compute.manager [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Received event network-changed-47ef9d15-a729-4946-8b82-f4e24ae804d9 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 572.086760] env[61972]: DEBUG nova.compute.manager [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Refreshing instance network info cache due to event network-changed-47ef9d15-a729-4946-8b82-f4e24ae804d9. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 572.086948] env[61972]: DEBUG oslo_concurrency.lockutils [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] Acquiring lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.154806] env[61972]: DEBUG oslo_vmware.api [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Task: {'id': task-1389068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096545} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.155067] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 572.155250] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 572.155419] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 572.155584] env[61972]: INFO nova.compute.manager [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Took 1.08 seconds to destroy the instance on the hypervisor. [ 572.155819] env[61972]: DEBUG oslo.service.loopingcall [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.156695] env[61972]: DEBUG nova.compute.manager [-] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 572.156797] env[61972]: DEBUG nova.network.neutron [-] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 572.186201] env[61972]: DEBUG nova.network.neutron [-] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.275031] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Releasing lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.275031] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 572.275031] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 572.275454] env[61972]: DEBUG oslo_concurrency.lockutils [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] Acquired lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.275644] env[61972]: DEBUG nova.network.neutron [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Refreshing network info cache for port 47ef9d15-a729-4946-8b82-f4e24ae804d9 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 572.280625] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df639024-812b-4c4e-9870-a092e91a7c61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.299612] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becc6b47-465d-441a-a610-710bf443c0de {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.326986] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9e258f66-df7b-4acf-a066-ba66958a7861 could not be found. [ 572.326986] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 572.326986] env[61972]: INFO nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Took 0.05 seconds to destroy the instance on the hypervisor. 
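[editor's note] The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around the refresh_cache-<uuid> names above are oslo.concurrency's named in-process locks guarding the instance network-info cache. A minimal sketch of that pattern, assuming a made-up refresh_network_cache helper and refresh_fn callback (not code from this log):

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh_fn):
        # Lock name mirrors the "refresh_cache-<uuid>" pattern seen in the log lines above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn()
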
[ 572.330546] env[61972]: DEBUG oslo.service.loopingcall [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.330546] env[61972]: DEBUG nova.compute.manager [-] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 572.330546] env[61972]: DEBUG nova.network.neutron [-] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 572.360697] env[61972]: DEBUG nova.network.neutron [-] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.459884] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.463818] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 572.463818] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.549s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.464737] env[61972]: INFO nova.compute.claims [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 572.690601] env[61972]: DEBUG nova.network.neutron [-] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.801166] env[61972]: DEBUG nova.network.neutron [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.868658] env[61972]: DEBUG nova.network.neutron [-] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.892121] env[61972]: DEBUG nova.network.neutron [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.902882] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Acquiring lock "9bfde590-fe6c-404d-88ad-9da1763c0870" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.903497] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Lock "9bfde590-fe6c-404d-88ad-9da1763c0870" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.972594] env[61972]: DEBUG nova.compute.utils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 572.979959] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 572.981028] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 573.109101] env[61972]: DEBUG nova.policy [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fc0aa686ea848f0be5293ec6f869bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f792bb41b7d4185b02ff116b06f76e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 573.193749] env[61972]: INFO nova.compute.manager [-] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Took 1.04 seconds to deallocate network for instance. 
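[editor's note] The "Policy check for network:attach_external_network failed" line above is an ordinary oslo.policy evaluation: the request's credential dict (roles 'reader'/'member') is checked against the registered rule and denied. A minimal sketch of such a check, assuming an illustrative admin-only check string rather than Nova's actual default policy definition:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # Illustrative rule; the real default lives in Nova's policy definitions.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'], 'project_id': 'demo', 'is_admin': False}
    allowed = enforcer.enforce('network:attach_external_network', {}, creds)
    # allowed is False for a plain project member, matching the DEBUG line above.
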
[ 573.371814] env[61972]: INFO nova.compute.manager [-] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Took 1.04 seconds to deallocate network for instance. [ 573.376894] env[61972]: DEBUG nova.compute.claims [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 573.377327] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.385485] env[61972]: ERROR nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. [ 573.385485] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 573.385485] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 573.385485] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 573.385485] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 573.385485] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 573.385485] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 573.385485] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 573.385485] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.385485] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 573.385485] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.385485] env[61972]: ERROR nova.compute.manager raise self.value [ 573.385485] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 573.385485] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 573.385485] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.385485] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 573.386040] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.386040] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 573.386040] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. 
[ 573.386040] env[61972]: ERROR nova.compute.manager [ 573.386040] env[61972]: Traceback (most recent call last): [ 573.386040] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 573.386040] env[61972]: listener.cb(fileno) [ 573.386040] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.386040] env[61972]: result = function(*args, **kwargs) [ 573.386040] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 573.386040] env[61972]: return func(*args, **kwargs) [ 573.386040] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 573.386040] env[61972]: raise e [ 573.386040] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 573.386040] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 573.386040] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 573.386040] env[61972]: created_port_ids = self._update_ports_for_instance( [ 573.386040] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 573.386040] env[61972]: with excutils.save_and_reraise_exception(): [ 573.386040] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.386040] env[61972]: self.force_reraise() [ 573.386040] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.386040] env[61972]: raise self.value [ 573.386040] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 573.386040] env[61972]: updated_port = self._update_port( [ 573.386040] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.386040] env[61972]: _ensure_no_port_binding_failure(port) [ 573.386040] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.386040] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 573.386948] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. [ 573.386948] env[61972]: Removing descriptor: 21 [ 573.386948] env[61972]: ERROR nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. 
[ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Traceback (most recent call last): [ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] yield resources [ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self.driver.spawn(context, instance, image_meta, [ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 573.386948] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] vm_ref = self.build_virtual_machine(instance, [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] vif_infos = vmwarevif.get_vif_info(self._session, [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] for vif in network_info: [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] return self._sync_wrapper(fn, *args, **kwargs) [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self.wait() [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self[:] = self._gt.wait() [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] return self._exit_event.wait() [ 573.387449] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 573.387850] env[61972]: ERROR 
nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] result = hub.switch() [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] return self.greenlet.switch() [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] result = function(*args, **kwargs) [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] return func(*args, **kwargs) [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] raise e [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] nwinfo = self.network_api.allocate_for_instance( [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 573.387850] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] created_port_ids = self._update_ports_for_instance( [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] with excutils.save_and_reraise_exception(): [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self.force_reraise() [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] raise self.value [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] updated_port = self._update_port( [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.388280] 
env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] _ensure_no_port_binding_failure(port) [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.388280] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] raise exception.PortBindingFailed(port_id=port['id']) [ 573.388710] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] nova.exception.PortBindingFailed: Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. [ 573.388710] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] [ 573.388710] env[61972]: INFO nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Terminating instance [ 573.394347] env[61972]: DEBUG oslo_concurrency.lockutils [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] Releasing lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.394579] env[61972]: DEBUG nova.compute.manager [req-b52ce6c1-d910-4144-9460-d69bda1e95dc req-9be37a7b-31dd-4f4e-9463-c499bf3e079f service nova] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Received event network-vif-deleted-47ef9d15-a729-4946-8b82-f4e24ae804d9 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 573.481155] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 573.704793] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.800919] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Successfully created port: 4f050603-36be-4dc6-902d-a372ebcf824a {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.864193] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b72bba1-c2e1-4d1b-a86a-cb3bba7f3f4f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.872793] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a0b6df-46ac-49e1-be0a-faf29d5200ae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.909799] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.909979] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquired lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.910170] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 573.912199] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa7e47b-4a9c-45d6-aecc-4efeb8430798 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.920477] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55325afa-b05a-4dd7-859a-b5e1924bcddf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.937281] env[61972]: DEBUG nova.compute.provider_tree [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.422871] env[61972]: DEBUG 
nova.compute.manager [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Received event network-changed-7ef17580-83f5-4048-999d-d75acd9e4f06 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 574.423228] env[61972]: DEBUG nova.compute.manager [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Refreshing instance network info cache due to event network-changed-7ef17580-83f5-4048-999d-d75acd9e4f06. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 574.423280] env[61972]: DEBUG oslo_concurrency.lockutils [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] Acquiring lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.440959] env[61972]: DEBUG nova.scheduler.client.report [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 574.451076] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 574.496087] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 574.525923] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 574.526206] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 574.526378] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 574.526532] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 574.527022] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 574.527642] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 574.527642] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 574.527642] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 574.527860] env[61972]: DEBUG nova.virt.hardware [None 
req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 574.529049] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 574.529453] env[61972]: DEBUG nova.virt.hardware [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 574.530719] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb0c411-2639-43ac-af67-5e73853ce3a3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.540801] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d6aa26-dc42-40ad-835c-289fdb051ec7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.546039] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.948677] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.948978] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 574.952918] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.730s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.049076] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Releasing lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.049563] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 575.049756] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 575.050082] env[61972]: DEBUG oslo_concurrency.lockutils [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] Acquired lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.050252] env[61972]: DEBUG nova.network.neutron [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Refreshing network info cache for port 7ef17580-83f5-4048-999d-d75acd9e4f06 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 575.054431] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c04743b5-fb47-43ab-8149-24a347747ce9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.068162] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77afa022-b403-4967-b02d-acf793ec4d4d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.096594] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 107b6153-65ad-48e4-9810-113bfacdd3d6 could not be found. 
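[editor's note] The WARNING above shows the destroy path tolerating an instance that never materialized on the vCenter backend: InstanceNotFound is caught, the instance is treated as already destroyed, and teardown continues (hence "Took 0.05 seconds to destroy the instance"). A minimal sketch of that pattern, with hypothetical lookup_vm_ref/unregister_vm callables standing in for the driver's real helpers:

    from nova import exception

    def destroy_on_backend(lookup_vm_ref, unregister_vm, instance_uuid):
        """Tear down a VM, treating an already-missing backend VM as destroyed."""
        try:
            vm_ref = lookup_vm_ref(instance_uuid)
        except exception.InstanceNotFound:
            # Matches the WARNING above: nothing to unregister, carry on.
            return
        unregister_vm(vm_ref)
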
[ 575.096830] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 575.096993] env[61972]: INFO nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 575.097248] env[61972]: DEBUG oslo.service.loopingcall [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.097448] env[61972]: DEBUG nova.compute.manager [-] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 575.097541] env[61972]: DEBUG nova.network.neutron [-] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 575.125489] env[61972]: DEBUG nova.network.neutron [-] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.454446] env[61972]: DEBUG nova.compute.utils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.455794] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 575.455964] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 575.579320] env[61972]: DEBUG nova.network.neutron [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.619518] env[61972]: DEBUG nova.policy [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ea62ff37442443b977fa00ae956be69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5069db5a016f47eca003fa3678009393', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 575.629933] env[61972]: DEBUG nova.network.neutron [-] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.685791] env[61972]: DEBUG nova.network.neutron [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.856428] env[61972]: ERROR nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. 
[ 575.856428] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 575.856428] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 575.856428] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 575.856428] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 575.856428] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 575.856428] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 575.856428] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 575.856428] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.856428] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 575.856428] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.856428] env[61972]: ERROR nova.compute.manager raise self.value [ 575.856428] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 575.856428] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 575.856428] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.856428] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 575.856922] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.856922] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 575.856922] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. 
[ 575.856922] env[61972]: ERROR nova.compute.manager [ 575.856922] env[61972]: Traceback (most recent call last): [ 575.856922] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 575.856922] env[61972]: listener.cb(fileno) [ 575.856922] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 575.856922] env[61972]: result = function(*args, **kwargs) [ 575.856922] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 575.856922] env[61972]: return func(*args, **kwargs) [ 575.856922] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 575.856922] env[61972]: raise e [ 575.856922] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 575.856922] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 575.856922] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 575.856922] env[61972]: created_port_ids = self._update_ports_for_instance( [ 575.856922] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 575.856922] env[61972]: with excutils.save_and_reraise_exception(): [ 575.856922] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.856922] env[61972]: self.force_reraise() [ 575.856922] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.856922] env[61972]: raise self.value [ 575.856922] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 575.856922] env[61972]: updated_port = self._update_port( [ 575.856922] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.856922] env[61972]: _ensure_no_port_binding_failure(port) [ 575.856922] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.856922] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 575.857751] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. [ 575.857751] env[61972]: Removing descriptor: 19 [ 575.857751] env[61972]: ERROR nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. 
[ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Traceback (most recent call last): [ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] yield resources [ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self.driver.spawn(context, instance, image_meta, [ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 575.857751] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] vm_ref = self.build_virtual_machine(instance, [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] vif_infos = vmwarevif.get_vif_info(self._session, [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] for vif in network_info: [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] return self._sync_wrapper(fn, *args, **kwargs) [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self.wait() [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self[:] = self._gt.wait() [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] return self._exit_event.wait() [ 575.858123] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 575.858516] env[61972]: ERROR 
nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] result = hub.switch() [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] return self.greenlet.switch() [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] result = function(*args, **kwargs) [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] return func(*args, **kwargs) [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] raise e [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] nwinfo = self.network_api.allocate_for_instance( [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 575.858516] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] created_port_ids = self._update_ports_for_instance( [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] with excutils.save_and_reraise_exception(): [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self.force_reraise() [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] raise self.value [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] updated_port = self._update_port( [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.858909] 
env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] _ensure_no_port_binding_failure(port) [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 575.858909] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] raise exception.PortBindingFailed(port_id=port['id']) [ 575.859277] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] nova.exception.PortBindingFailed: Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. [ 575.859277] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] [ 575.859277] env[61972]: INFO nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Terminating instance [ 575.892755] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf249c8-ab05-4849-89d4-0ac2e934f7cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.900211] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74ae777-5cd5-4e50-b536-5e8641b72168 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.937271] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed89bef-7933-42d9-8492-3f88a243dd6a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.941516] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1679ce5-6768-4183-a99f-1843dffae15f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.956015] env[61972]: DEBUG nova.compute.provider_tree [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.963431] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 576.132562] env[61972]: INFO nova.compute.manager [-] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Took 1.03 seconds to deallocate network for instance. 
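Editor's note: the traceback above shows the pattern behind these failures: network allocation runs in a background greenthread (_allocate_network_async), and the VMware driver only blocks on the result when it first iterates network_info (the _sync_wrapper / self._gt.wait() frames), at which point the PortBindingFailed raised in the greenthread surfaces inside spawn(). A rough, self-contained sketch of that deferred pattern using plain eventlet follows; the class and function names are illustrative stand-ins, not Nova's real objects.

import eventlet

class PortBindingFailed(Exception):
    pass

def allocate_ports(instance_uuid):
    # Stand-in for network_api.allocate_for_instance(); failing here is what
    # later surfaces out of wait() inside the driver's spawn path.
    raise PortBindingFailed(f"Binding failed for a port of {instance_uuid}")

class NetworkInfoAsync:
    """Looks like a list of VIFs, but is filled in lazily from a greenthread."""

    def __init__(self, instance_uuid):
        self._gt = eventlet.spawn(allocate_ports, instance_uuid)
        self._vifs = None

    def __iter__(self):
        if self._vifs is None:
            # Equivalent of the _sync_wrapper() frame above: block on the
            # greenthread and re-raise whatever it raised.
            self._vifs = self._gt.wait()
        return iter(self._vifs)

if __name__ == '__main__':
    network_info = NetworkInfoAsync('2795b001-aaf2-4886-bba7-bd764c29638c')
    try:
        for vif in network_info:      # first use triggers the wait()
            print(vif)
    except PortBindingFailed as exc:
        print('spawn failed:', exc)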
[ 576.134986] env[61972]: DEBUG nova.compute.claims [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 576.135192] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.189334] env[61972]: DEBUG oslo_concurrency.lockutils [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] Releasing lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.192178] env[61972]: DEBUG nova.compute.manager [req-89f102fc-ee37-4cbf-938f-1dfb53db7e86 req-31468f33-93e9-4180-8298-7dbe596467df service nova] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Received event network-vif-deleted-7ef17580-83f5-4048-999d-d75acd9e4f06 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 576.363460] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Acquiring lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.364339] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Acquired lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.364339] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 576.395155] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Successfully created port: 6ddf3917-6f4b-4b0b-9b61-19ef9c485793 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 576.462029] env[61972]: DEBUG nova.scheduler.client.report [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 576.877669] env[61972]: DEBUG nova.compute.manager [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Received event network-changed-4f050603-36be-4dc6-902d-a372ebcf824a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 576.877893] env[61972]: DEBUG nova.compute.manager [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Refreshing instance network info cache due to event network-changed-4f050603-36be-4dc6-902d-a372ebcf824a. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 576.878199] env[61972]: DEBUG oslo_concurrency.lockutils [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] Acquiring lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.937092] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.966390] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.014s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.967037] env[61972]: ERROR nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. 
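Editor's note: the check that raises these errors is small: Neutron marks an unbindable port by setting its binding:vif_type to 'binding_failed', and Nova's _ensure_no_port_binding_failure raises PortBindingFailed when it sees that value (the traceback below shows where it is called). A minimal sketch of that check, with an illustrative exception class and sample port dict rather than Nova's actual objects:

# Assumes only that a Neutron port dict carries 'binding:vif_type', which is
# set to 'binding_failed' when the backend could not bind the port.
VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reported a failed binding for this port."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

if __name__ == '__main__':
    bad_port = {'id': '00686c4d-cd65-4d4c-b7ea-3bfa3387ed64',
                'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(bad_port)
    except PortBindingFailed as exc:
        print(exc)   # mirrors the message logged by nova.compute.manager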
[ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Traceback (most recent call last): [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self.driver.spawn(context, instance, image_meta, [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] vm_ref = self.build_virtual_machine(instance, [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.967037] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] for vif in network_info: [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] return self._sync_wrapper(fn, *args, **kwargs) [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self.wait() [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self[:] = self._gt.wait() [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] return self._exit_event.wait() [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] result = hub.switch() [ 576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
576.967448] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] return self.greenlet.switch() [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] result = function(*args, **kwargs) [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] return func(*args, **kwargs) [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] raise e [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] nwinfo = self.network_api.allocate_for_instance( [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] created_port_ids = self._update_ports_for_instance( [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] with excutils.save_and_reraise_exception(): [ 576.967890] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] self.force_reraise() [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] raise self.value [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] updated_port = self._update_port( [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] _ensure_no_port_binding_failure(port) [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] raise exception.PortBindingFailed(port_id=port['id']) [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] nova.exception.PortBindingFailed: Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. [ 576.968343] env[61972]: ERROR nova.compute.manager [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] [ 576.968673] env[61972]: DEBUG nova.compute.utils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 576.969508] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.785s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.970921] env[61972]: INFO nova.compute.claims [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.974363] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Build of instance 96e5b238-aab4-4f75-abe8-f5a14b015099 was re-scheduled: Binding failed for port 00686c4d-cd65-4d4c-b7ea-3bfa3387ed64, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 576.974805] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 576.975039] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Acquiring lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.975188] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Acquired lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.975342] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 576.977112] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 577.020786] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 577.020978] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 577.021146] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.021999] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 577.022683] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.022908] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 577.023156] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 577.023313] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 577.023478] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 577.024022] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 577.024022] env[61972]: DEBUG nova.virt.hardware [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 577.025097] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab8253e-d004-4556-8638-229aec3bc6c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.038567] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7139bbff-585c-449d-96a0-e54b46f6dbec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.150345] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.514737] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.657466] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Releasing lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.657466] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 577.657466] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 577.657466] env[61972]: DEBUG oslo_concurrency.lockutils [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] Acquired lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.657656] env[61972]: DEBUG nova.network.neutron [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Refreshing network info cache for port 4f050603-36be-4dc6-902d-a372ebcf824a {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 577.659157] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf62ac8f-358b-4962-a444-4819b8e96be8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.670845] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd376b3-a12d-4a90-9e0c-e165500bed45 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.696874] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2795b001-aaf2-4886-bba7-bd764c29638c could not be found. [ 577.696874] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 577.696874] env[61972]: INFO nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 577.697935] env[61972]: DEBUG oslo.service.loopingcall [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 577.697935] env[61972]: DEBUG nova.compute.manager [-] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 577.697935] env[61972]: DEBUG nova.network.neutron [-] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 577.732548] env[61972]: DEBUG nova.network.neutron [-] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.927268] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.202097] env[61972]: DEBUG nova.network.neutron [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.239203] env[61972]: DEBUG nova.network.neutron [-] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.357276] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Acquiring lock "5c036232-736c-4c34-a2b7-7de517b9cd50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.363855] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Lock "5c036232-736c-4c34-a2b7-7de517b9cd50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.007s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.428881] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Releasing lock "refresh_cache-96e5b238-aab4-4f75-abe8-f5a14b015099" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.429170] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 578.429358] env[61972]: DEBUG nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 578.429532] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 578.440850] env[61972]: DEBUG nova.network.neutron [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.519381] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5aba965-6c9a-4723-8c86-5b31bfa6ea29 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.530369] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43b02ce-a0af-42c9-b48f-8daa3655b5f8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.569678] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8814078-7642-492e-b2e2-1f211a5f3103 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.577912] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9e08ab-6434-4b0e-828e-83781002f928 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.591821] env[61972]: DEBUG nova.compute.provider_tree [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.617809] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.745710] env[61972]: INFO nova.compute.manager [-] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Took 1.05 seconds to deallocate network for instance. 
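Editor's note: the recurring Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>" entries come from oslo.concurrency's named locks, which serialize refreshes of a single instance's network-info cache across greenthreads. A minimal sketch of that usage; the cache-refresh body is a placeholder, not Nova's code.

from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid):
    lock_name = f"refresh_cache-{instance_uuid}"
    # lockutils.lock() is a context manager; by default it is an in-process
    # lock, which is what these compute-manager cache refreshes use. The
    # "Acquiring"/"Releasing" debug lines above are emitted on entry and exit.
    with lockutils.lock(lock_name):
        # ... rebuild the instance_info_cache from Neutron here ...
        return []   # e.g. an empty network_info, as logged for this instance

if __name__ == '__main__':
    print(refresh_network_cache('2795b001-aaf2-4886-bba7-bd764c29638c'))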
[ 578.751024] env[61972]: DEBUG nova.compute.claims [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 578.753878] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.943173] env[61972]: DEBUG oslo_concurrency.lockutils [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] Releasing lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.943448] env[61972]: DEBUG nova.compute.manager [req-f84c5bd1-d8d1-4776-a7e7-a4ea4be46e73 req-d489629a-462b-4e4e-ad63-28a5923d82d8 service nova] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Received event network-vif-deleted-4f050603-36be-4dc6-902d-a372ebcf824a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 579.099573] env[61972]: DEBUG nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 579.121715] env[61972]: DEBUG nova.network.neutron [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.541435] env[61972]: ERROR nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. 
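Editor's note: the inventory payload reported above for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 determines how much the scheduler can place on this node; usable capacity per resource class is the usual Placement formula (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick arithmetic check of the logged figures (plain Python, not Placement's implementation):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'max_unit': 175},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, max per allocation={inv['max_unit']}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400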
[ 579.541435] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 579.541435] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 579.541435] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 579.541435] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 579.541435] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 579.541435] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 579.541435] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 579.541435] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.541435] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 579.541435] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.541435] env[61972]: ERROR nova.compute.manager raise self.value [ 579.541435] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 579.541435] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 579.541435] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.541435] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 579.542352] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 579.542352] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 579.542352] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. 
[ 579.542352] env[61972]: ERROR nova.compute.manager [ 579.542352] env[61972]: Traceback (most recent call last): [ 579.542352] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 579.542352] env[61972]: listener.cb(fileno) [ 579.542352] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 579.542352] env[61972]: result = function(*args, **kwargs) [ 579.542352] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 579.542352] env[61972]: return func(*args, **kwargs) [ 579.542352] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 579.542352] env[61972]: raise e [ 579.542352] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 579.542352] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 579.542352] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 579.542352] env[61972]: created_port_ids = self._update_ports_for_instance( [ 579.542352] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 579.542352] env[61972]: with excutils.save_and_reraise_exception(): [ 579.542352] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.542352] env[61972]: self.force_reraise() [ 579.542352] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.542352] env[61972]: raise self.value [ 579.542352] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 579.542352] env[61972]: updated_port = self._update_port( [ 579.542352] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.542352] env[61972]: _ensure_no_port_binding_failure(port) [ 579.542352] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 579.542352] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 579.543310] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. [ 579.543310] env[61972]: Removing descriptor: 21 [ 579.544072] env[61972]: ERROR nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. 
[ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Traceback (most recent call last): [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] yield resources [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self.driver.spawn(context, instance, image_meta, [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] vm_ref = self.build_virtual_machine(instance, [ 579.544072] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] for vif in network_info: [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] return self._sync_wrapper(fn, *args, **kwargs) [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self.wait() [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self[:] = self._gt.wait() [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] return self._exit_event.wait() [ 579.544479] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 579.544479] env[61972]: ERROR 
nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] result = hub.switch() [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] return self.greenlet.switch() [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] result = function(*args, **kwargs) [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] return func(*args, **kwargs) [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] raise e [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] nwinfo = self.network_api.allocate_for_instance( [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] created_port_ids = self._update_ports_for_instance( [ 579.544953] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] with excutils.save_and_reraise_exception(): [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self.force_reraise() [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] raise self.value [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] updated_port = self._update_port( [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.545644] 
env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] _ensure_no_port_binding_failure(port) [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] raise exception.PortBindingFailed(port_id=port['id']) [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] nova.exception.PortBindingFailed: Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. [ 579.545644] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] [ 579.546155] env[61972]: INFO nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Terminating instance [ 579.578254] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Acquiring lock "eeb44b48-ed08-4f20-9498-b0eed38a00a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.578500] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Lock "eeb44b48-ed08-4f20-9498-b0eed38a00a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.603912] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.634s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.604538] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 579.610673] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.816s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.624932] env[61972]: INFO nova.compute.manager [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] [instance: 96e5b238-aab4-4f75-abe8-f5a14b015099] Took 1.19 seconds to deallocate network for instance. [ 579.737971] env[61972]: DEBUG nova.compute.manager [req-148ebcba-b598-4c8a-a1da-fc68f875e416 req-9685af3e-a184-4756-b060-b883cfad3d70 service nova] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Received event network-changed-6ddf3917-6f4b-4b0b-9b61-19ef9c485793 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 579.737971] env[61972]: DEBUG nova.compute.manager [req-148ebcba-b598-4c8a-a1da-fc68f875e416 req-9685af3e-a184-4756-b060-b883cfad3d70 service nova] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Refreshing instance network info cache due to event network-changed-6ddf3917-6f4b-4b0b-9b61-19ef9c485793. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 579.737971] env[61972]: DEBUG oslo_concurrency.lockutils [req-148ebcba-b598-4c8a-a1da-fc68f875e416 req-9685af3e-a184-4756-b060-b883cfad3d70 service nova] Acquiring lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.737971] env[61972]: DEBUG oslo_concurrency.lockutils [req-148ebcba-b598-4c8a-a1da-fc68f875e416 req-9685af3e-a184-4756-b060-b883cfad3d70 service nova] Acquired lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.737971] env[61972]: DEBUG nova.network.neutron [req-148ebcba-b598-4c8a-a1da-fc68f875e416 req-9685af3e-a184-4756-b060-b883cfad3d70 service nova] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Refreshing network info cache for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 580.054407] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Acquiring lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.111110] env[61972]: DEBUG nova.compute.utils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 580.114862] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Allocating IP 
information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 580.121299] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 580.196246] env[61972]: DEBUG nova.policy [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77f085d6aafe41328b41f3c0ddc3079e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb754ac92c5648ed888fdf68ccd14ced', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 580.312904] env[61972]: DEBUG nova.network.neutron [req-148ebcba-b598-4c8a-a1da-fc68f875e416 req-9685af3e-a184-4756-b060-b883cfad3d70 service nova] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.498914] env[61972]: DEBUG nova.network.neutron [req-148ebcba-b598-4c8a-a1da-fc68f875e416 req-9685af3e-a184-4756-b060-b883cfad3d70 service nova] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.623403] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 580.682964] env[61972]: INFO nova.scheduler.client.report [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] Deleted allocations for instance 96e5b238-aab4-4f75-abe8-f5a14b015099 [ 580.705307] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e5da12-ed66-4313-8b38-4bdfb9d86a9d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.723877] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8842df54-e20c-4a71-b5c3-b3ca8dc6e849 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.762479] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e016185-b410-451b-9dfa-6bcdcc067921 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.771704] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53816002-b846-4c47-9d56-3cf04fa94eca {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.788799] env[61972]: DEBUG nova.compute.provider_tree [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.938179] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Successfully created port: 409c65e1-161b-4663-9c58-b262e71200df {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.006152] env[61972]: DEBUG oslo_concurrency.lockutils [req-148ebcba-b598-4c8a-a1da-fc68f875e416 req-9685af3e-a184-4756-b060-b883cfad3d70 service nova] Releasing lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.006748] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Acquired lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.007216] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 581.193486] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dcada28b-6111-470a-9d20-54c85a482cdf tempest-VolumesAssistedSnapshotsTest-1708784017 tempest-VolumesAssistedSnapshotsTest-1708784017-project-member] 
Lock "96e5b238-aab4-4f75-abe8-f5a14b015099" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.550s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.291181] env[61972]: DEBUG nova.scheduler.client.report [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 581.538936] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.637020] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 581.663630] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 581.663875] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 581.664047] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.664238] env[61972]: DEBUG nova.virt.hardware 
[None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 581.664382] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.664521] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 581.664720] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 581.664872] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 581.665092] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 581.665275] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 581.665441] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 581.666310] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a16aa1-6b69-472d-969d-4411c222873a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.674444] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce14845a-f130-4d53-9c68-25be97ad451d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.699280] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Starting 
instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 581.736105] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.799019] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.189s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.799019] env[61972]: ERROR nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. [ 581.799019] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] Traceback (most recent call last): [ 581.799019] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 581.799019] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self.driver.spawn(context, instance, image_meta, [ 581.799019] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 581.799019] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self._vmops.spawn(context, instance, image_meta, injected_files, [ 581.799019] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 581.799019] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] vm_ref = self.build_virtual_machine(instance, [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] vif_infos = vmwarevif.get_vif_info(self._session, [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] for vif in network_info: [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] return self._sync_wrapper(fn, *args, **kwargs) [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self.wait() [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self[:] = self._gt.wait() [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] return self._exit_event.wait() [ 581.799520] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] result = hub.switch() [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] return self.greenlet.switch() [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] result = function(*args, **kwargs) [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] return func(*args, **kwargs) [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] raise e [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] nwinfo = self.network_api.allocate_for_instance( [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 581.800131] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] created_port_ids = self._update_ports_for_instance( [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] with excutils.save_and_reraise_exception(): [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] self.force_reraise() [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] raise self.value [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] updated_port = self._update_port( [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] _ensure_no_port_binding_failure(port) [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 581.800821] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] raise exception.PortBindingFailed(port_id=port['id']) [ 581.801277] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] nova.exception.PortBindingFailed: Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. [ 581.801277] env[61972]: ERROR nova.compute.manager [instance: d32a7937-792a-4959-bded-819463472399] [ 581.801277] env[61972]: DEBUG nova.compute.utils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 581.802498] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.023s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.809410] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Build of instance d32a7937-792a-4959-bded-819463472399 was re-scheduled: Binding failed for port a020e457-148f-436e-8d92-e4822ddc9e60, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 581.809410] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 581.809410] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.809410] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.809410] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 582.045090] env[61972]: DEBUG nova.compute.manager [req-6f049f78-1c33-4498-96a7-350a8b31347d req-de80fc15-e421-4c59-b5cf-6e69ee17e635 service nova] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Received event network-vif-deleted-6ddf3917-6f4b-4b0b-9b61-19ef9c485793 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 582.231074] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.240908] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Releasing lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.240908] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 582.242842] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 582.242842] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbe1d35f-470f-4dbe-88e9-00bb4ea9681e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.254117] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b98de4-108c-40fa-b459-3a23bd244806 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.280087] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6858305a-6ab4-401d-ad1f-e6d21117d9e3 could not be found. [ 582.280087] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 582.280087] env[61972]: INFO nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 582.280254] env[61972]: DEBUG oslo.service.loopingcall [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 582.280484] env[61972]: DEBUG nova.compute.manager [-] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 582.280576] env[61972]: DEBUG nova.network.neutron [-] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 582.355935] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.367542] env[61972]: DEBUG nova.network.neutron [-] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.658276] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.775974] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac16bcb2-2e91-4f1c-92f8-ee9ee1f8cb03 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.785902] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7681b5-824a-4bd1-9601-d199d0045211 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.820265] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e5d5e6-054d-4e8c-af9a-94d7eed53786 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.828808] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e244dada-43a1-4db4-8f77-d512549bdb71 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.843600] env[61972]: DEBUG nova.compute.provider_tree [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.871069] env[61972]: DEBUG nova.network.neutron [-] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.163418] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "refresh_cache-d32a7937-792a-4959-bded-819463472399" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.163958] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 583.164214] env[61972]: DEBUG nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 583.164481] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 583.203083] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.347489] env[61972]: DEBUG nova.scheduler.client.report [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 583.375051] env[61972]: INFO nova.compute.manager [-] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Took 1.09 seconds to deallocate network for instance. [ 583.380028] env[61972]: DEBUG nova.compute.claims [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 583.380028] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.700411] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. 
[ 583.700411] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 583.700411] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.700411] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 583.700411] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.700411] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 583.700411] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.700411] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 583.700411] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.700411] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 583.700411] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.700411] env[61972]: ERROR nova.compute.manager raise self.value [ 583.700411] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.700411] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 583.700411] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.700411] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 583.701515] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.701515] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 583.701515] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. 
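Note on the recurring failure above: every build in this log that fails does so in nova.network.neutron, where _update_port (neutron.py line 585) calls _ensure_no_port_binding_failure (neutron.py line 294), which raises PortBindingFailed for the port being bound. The following is only a minimal sketch of that guard for readers of this log, not Nova's actual implementation; in particular, the assumption that a failed binding is reported through the port's 'binding:vif_type' field is not shown anywhere in the log and is labelled as such below.

    # Hypothetical sketch mirroring the call chain shown in the tracebacks above.
    # ASSUMPTION: a failed binding is signalled by binding:vif_type == 'binding_failed';
    # the log only shows the raise site, not the condition checked.

    class PortBindingFailed(Exception):
        """Raised when Neutron reports that a port could not be bound."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Reject ports whose binding attempt failed on the Neutron side.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    def _update_port(port):
        # ...update the port via the Neutron API, then verify the binding...
        _ensure_no_port_binding_failure(port)
        return port

When the exception propagates out of _allocate_network_async, the compute manager logs the tracebacks seen here and re-schedules the instance, which is why the same port ID appears in the "Failed to build and run instance", "Terminating instance", and "was re-scheduled" entries.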
[ 583.701515] env[61972]: ERROR nova.compute.manager [ 583.701515] env[61972]: Traceback (most recent call last): [ 583.701515] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 583.701515] env[61972]: listener.cb(fileno) [ 583.701515] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.701515] env[61972]: result = function(*args, **kwargs) [ 583.701515] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.701515] env[61972]: return func(*args, **kwargs) [ 583.701515] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 583.701515] env[61972]: raise e [ 583.701515] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.701515] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 583.701515] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.701515] env[61972]: created_port_ids = self._update_ports_for_instance( [ 583.701515] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.701515] env[61972]: with excutils.save_and_reraise_exception(): [ 583.701515] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.701515] env[61972]: self.force_reraise() [ 583.701515] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.701515] env[61972]: raise self.value [ 583.701515] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.701515] env[61972]: updated_port = self._update_port( [ 583.701515] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.701515] env[61972]: _ensure_no_port_binding_failure(port) [ 583.701515] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.701515] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 583.703624] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. [ 583.703624] env[61972]: Removing descriptor: 21 [ 583.703624] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. 
[ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Traceback (most recent call last): [ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] yield resources [ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self.driver.spawn(context, instance, image_meta, [ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.703624] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] vm_ref = self.build_virtual_machine(instance, [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] for vif in network_info: [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] return self._sync_wrapper(fn, *args, **kwargs) [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self.wait() [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self[:] = self._gt.wait() [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] return self._exit_event.wait() [ 583.704358] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.705058] env[61972]: ERROR 
nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] result = hub.switch() [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] return self.greenlet.switch() [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] result = function(*args, **kwargs) [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] return func(*args, **kwargs) [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] raise e [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] nwinfo = self.network_api.allocate_for_instance( [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.705058] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] created_port_ids = self._update_ports_for_instance( [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] with excutils.save_and_reraise_exception(): [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self.force_reraise() [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] raise self.value [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] updated_port = self._update_port( [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.705984] 
env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] _ensure_no_port_binding_failure(port) [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.705984] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] raise exception.PortBindingFailed(port_id=port['id']) [ 583.706468] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] nova.exception.PortBindingFailed: Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. [ 583.706468] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] [ 583.706468] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Terminating instance [ 583.709260] env[61972]: DEBUG nova.network.neutron [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.859021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.054s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.859021] env[61972]: ERROR nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. 
[ 583.859021] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Traceback (most recent call last): [ 583.859021] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 583.859021] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self.driver.spawn(context, instance, image_meta, [ 583.859021] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 583.859021] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.859021] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.859021] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] vm_ref = self.build_virtual_machine(instance, [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] for vif in network_info: [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] return self._sync_wrapper(fn, *args, **kwargs) [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self.wait() [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self[:] = self._gt.wait() [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] return self._exit_event.wait() [ 583.859448] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] result = hub.switch() [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] return self.greenlet.switch() [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] result = function(*args, **kwargs) [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] return func(*args, **kwargs) [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] raise e [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] nwinfo = self.network_api.allocate_for_instance( [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.860246] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] created_port_ids = self._update_ports_for_instance( [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] with excutils.save_and_reraise_exception(): [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] self.force_reraise() [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] raise self.value [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] updated_port = self._update_port( [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] _ensure_no_port_binding_failure(port) [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 583.861114] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] raise exception.PortBindingFailed(port_id=port['id']) [ 583.861770] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] nova.exception.PortBindingFailed: Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. [ 583.861770] env[61972]: ERROR nova.compute.manager [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] [ 583.861770] env[61972]: DEBUG nova.compute.utils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 583.861770] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.863s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.870277] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Build of instance fb28710d-cd15-41d4-b7aa-8389093ea9a8 was re-scheduled: Binding failed for port 2473edfc-d94b-4017-8570-fd43fee873db, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 583.870277] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 583.870277] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.870277] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquired lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.870899] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 584.055188] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Acquiring lock "503419e5-ae32-49d4-bc41-838fb3c9437e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.055583] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Lock "503419e5-ae32-49d4-bc41-838fb3c9437e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.080449] env[61972]: DEBUG nova.compute.manager [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Received event network-changed-409c65e1-161b-4663-9c58-b262e71200df {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 584.080574] env[61972]: DEBUG nova.compute.manager [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Refreshing instance network info cache due to event network-changed-409c65e1-161b-4663-9c58-b262e71200df. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 584.081069] env[61972]: DEBUG oslo_concurrency.lockutils [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] Acquiring lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.081069] env[61972]: DEBUG oslo_concurrency.lockutils [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] Acquired lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.081069] env[61972]: DEBUG nova.network.neutron [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Refreshing network info cache for port 409c65e1-161b-4663-9c58-b262e71200df {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 584.212026] env[61972]: INFO nova.compute.manager [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: d32a7937-792a-4959-bded-819463472399] Took 1.05 seconds to deallocate network for instance. [ 584.213355] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.399914] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.626732] env[61972]: DEBUG nova.network.neutron [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.637418] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.846380] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629fe266-7c3d-4e5e-8ee4-6b3642c1178f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.854867] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4683c92c-f2f8-42df-99fc-1ef397b78ed2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.890199] env[61972]: DEBUG nova.network.neutron [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.891840] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3ff400-295c-447a-87cd-afc1b67bd591 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.899403] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce91d13-90f2-4d4e-8b0d-835aa2da0058 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.916539] env[61972]: DEBUG nova.compute.provider_tree [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.139828] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Releasing lock "refresh_cache-fb28710d-cd15-41d4-b7aa-8389093ea9a8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.140101] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 585.140291] env[61972]: DEBUG nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 585.140458] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 585.167716] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.257459] env[61972]: INFO nova.scheduler.client.report [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted allocations for instance d32a7937-792a-4959-bded-819463472399 [ 585.395338] env[61972]: DEBUG oslo_concurrency.lockutils [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] Releasing lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.395614] env[61972]: DEBUG nova.compute.manager [req-5537f646-e0dd-4083-821e-0caa4e0d1455 req-3e6c83c7-2274-4b6b-b1b6-54ca4cfdf1b2 service nova] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Received event network-vif-deleted-409c65e1-161b-4663-9c58-b262e71200df {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 585.396170] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquired lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.396270] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.419714] env[61972]: DEBUG nova.scheduler.client.report [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 585.671810] env[61972]: DEBUG nova.network.neutron [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.772306] env[61972]: DEBUG oslo_concurrency.lockutils [None req-04d415ac-47a8-46cf-ae37-980d0d0c7407 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "d32a7937-792a-4959-bded-819463472399" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.976s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.925623] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.067s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.926829] env[61972]: ERROR nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. 
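The PortBindingFailed errors in this log (the traceback for the exception above follows in the next records) all originate from the _ensure_no_port_binding_failure helper cited in the tracebacks at nova/network/neutron.py:294. As an illustrative sketch only, not the actual Nova source, the check boils down to inspecting the binding:vif_type that Neutron returns for the port and failing fast when it reads "binding_failed"; the exception class and message are recreated here purely to keep the snippet self-contained.

    # Illustrative sketch, not the Nova source: mirrors the check the tracebacks point at.
    VIF_TYPE_BINDING_FAILED = "binding_failed"

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        """Raise if Neutron reported the port's binding as failed."""
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])

    # Example with a port body shaped like Neutron's GET /v2.0/ports/<id> response:
    port = {"id": "2079a444-eddf-4901-9beb-69ec949ef2ce",
            "binding:vif_type": "binding_failed"}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

Because the failure surfaces inside the async network allocation greenthread, the compute manager only sees it when the VIF list is iterated during spawn, which is why the traceback runs from vmwareapi vmops back through eventlet before reaching the Neutron code that actually raised it.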
[ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Traceback (most recent call last): [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self.driver.spawn(context, instance, image_meta, [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] vm_ref = self.build_virtual_machine(instance, [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] vif_infos = vmwarevif.get_vif_info(self._session, [ 585.926829] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] for vif in network_info: [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] return self._sync_wrapper(fn, *args, **kwargs) [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self.wait() [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self[:] = self._gt.wait() [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] return self._exit_event.wait() [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] result = hub.switch() [ 585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
585.927424] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] return self.greenlet.switch() [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] result = function(*args, **kwargs) [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] return func(*args, **kwargs) [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] raise e [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] nwinfo = self.network_api.allocate_for_instance( [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] created_port_ids = self._update_ports_for_instance( [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] with excutils.save_and_reraise_exception(): [ 585.928349] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] self.force_reraise() [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] raise self.value [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] updated_port = self._update_port( [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] _ensure_no_port_binding_failure(port) [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] raise exception.PortBindingFailed(port_id=port['id']) [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] nova.exception.PortBindingFailed: Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. [ 585.928883] env[61972]: ERROR nova.compute.manager [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] [ 585.929256] env[61972]: DEBUG nova.compute.utils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 585.931687] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.929s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.935885] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Build of instance 7a7c98db-6ed4-4908-adc8-53347d693dca was re-scheduled: Binding failed for port 2079a444-eddf-4901-9beb-69ec949ef2ce, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 585.937366] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 585.937366] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Acquiring lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.937366] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Acquired lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.937366] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.941640] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.974293] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Acquiring lock "5300907c-d589-4ccf-a9c5-4a6bd819783b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.974545] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Lock "5300907c-d589-4ccf-a9c5-4a6bd819783b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.083942] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.175419] env[61972]: INFO nova.compute.manager [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: fb28710d-cd15-41d4-b7aa-8389093ea9a8] Took 1.03 seconds to deallocate network for instance. [ 586.281784] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 586.470201] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.588692] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Releasing lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.589040] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 586.589280] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.589524] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-043f3c30-2f7d-4a84-b44a-f4986bfad839 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.602790] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62aec99-e854-491e-8a02-16793db5dccb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.614991] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.633807] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ba985ad1-390d-4a2e-ad96-c273231f8549 could not be found. [ 586.634129] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 586.634384] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Took 0.05 seconds to destroy the instance on the hypervisor. [ 586.634611] env[61972]: DEBUG oslo.service.loopingcall [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.635444] env[61972]: DEBUG nova.compute.manager [-] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 586.635550] env[61972]: DEBUG nova.network.neutron [-] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.667581] env[61972]: DEBUG nova.network.neutron [-] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.818941] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.923711] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a3f7c0-79ce-4584-a530-3e7cf34f4850 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.936193] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca0f19f-f9fa-4fe5-b4fa-11638f368322 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.972895] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055e27e5-3d9c-4db4-ba4f-75fc7c94f536 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.980709] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a45eea0-4464-44b5-8e44-829a3b9cd2ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.996529] env[61972]: DEBUG nova.compute.provider_tree [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.120131] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Releasing lock "refresh_cache-7a7c98db-6ed4-4908-adc8-53347d693dca" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.120398] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 587.120579] env[61972]: DEBUG nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 587.120745] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 587.150255] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.173369] env[61972]: DEBUG nova.network.neutron [-] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.214034] env[61972]: INFO nova.scheduler.client.report [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Deleted allocations for instance fb28710d-cd15-41d4-b7aa-8389093ea9a8 [ 587.501125] env[61972]: DEBUG nova.scheduler.client.report [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 587.550020] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "92b06621-cdaa-4723-b339-c0f698897d24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.550020] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "92b06621-cdaa-4723-b339-c0f698897d24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.655202] env[61972]: DEBUG nova.network.neutron [None req-a592382d-a2db-4133-a572-1832e0c25c4b 
tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.681900] env[61972]: INFO nova.compute.manager [-] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Took 1.04 seconds to deallocate network for instance. [ 587.686339] env[61972]: DEBUG nova.compute.claims [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 587.686339] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.724216] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0409a62b-eac2-4166-ac75-b1e9b7a6ea8d tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "fb28710d-cd15-41d4-b7aa-8389093ea9a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.834s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.009031] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.075s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.009031] env[61972]: ERROR nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. 
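The repeated advice in these records is "please check neutron logs for more information"; before grepping neutron-server, it is often quicker to look at the failed port's binding attributes directly. A minimal diagnostic sketch, assuming python-openstacksdk is installed and a clouds.yaml entry named "devstack" exists (both are assumptions, adjust to the environment), using the port id from the PortBindingFailed record above:

    # Diagnostic sketch (assumptions: openstacksdk installed, clouds.yaml cloud "devstack").
    import openstack

    conn = openstack.connect(cloud="devstack")          # cloud name is an assumption
    port_id = "07b03cac-6b8d-49a6-ab08-029bb9658a4e"    # port id from the error above

    port = conn.network.get_port(port_id)
    print("status:           ", port.status)
    print("binding:host_id:  ", port.binding_host_id)   # host neutron tried to bind to
    print("binding:vif_type: ", port.binding_vif_type)  # "binding_failed" for these errors
    print("binding:vnic_type:", port.binding_vnic_type)
    print("binding:profile:  ", port.binding_profile)

A "binding_failed" vif_type combined with an empty or unexpected binding:host_id usually points at a dead or missing L2 agent on the compute host, which is also where the neutron agent logs tend to be most informative.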
[ 588.009031] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Traceback (most recent call last): [ 588.009031] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 588.009031] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self.driver.spawn(context, instance, image_meta, [ 588.009031] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 588.009031] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 588.009031] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 588.009031] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] vm_ref = self.build_virtual_machine(instance, [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] for vif in network_info: [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] return self._sync_wrapper(fn, *args, **kwargs) [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self.wait() [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self[:] = self._gt.wait() [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] return self._exit_event.wait() [ 588.009620] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] result = hub.switch() [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] return self.greenlet.switch() [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] result = function(*args, **kwargs) [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] return func(*args, **kwargs) [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] raise e [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] nwinfo = self.network_api.allocate_for_instance( [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 588.011035] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] created_port_ids = self._update_ports_for_instance( [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] with excutils.save_and_reraise_exception(): [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] self.force_reraise() [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] raise self.value [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] updated_port = self._update_port( [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] _ensure_no_port_binding_failure(port) [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 588.011422] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] raise exception.PortBindingFailed(port_id=port['id']) [ 588.011800] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] nova.exception.PortBindingFailed: Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. [ 588.011800] env[61972]: ERROR nova.compute.manager [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] [ 588.011800] env[61972]: DEBUG nova.compute.utils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 588.011800] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.633s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.017029] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Build of instance 24073cc5-cccd-4a1b-87d6-a8a6458251f9 was re-scheduled: Binding failed for port 07b03cac-6b8d-49a6-ab08-029bb9658a4e, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 588.017029] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 588.017029] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Acquiring lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.017029] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Acquired lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.017384] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 588.161494] env[61972]: INFO nova.compute.manager [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] [instance: 7a7c98db-6ed4-4908-adc8-53347d693dca] Took 1.04 seconds to deallocate network for instance. [ 588.229411] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 588.270198] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "50e4d9d3-a17b-4bb2-9816-bb44f269370e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.270198] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "50e4d9d3-a17b-4bb2-9816-bb44f269370e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.330156] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "22634f52-c696-417b-bfe9-0a7ca62aad40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.330375] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "22634f52-c696-417b-bfe9-0a7ca62aad40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.567322] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.755639] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.788290] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.994113] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40454934-cec1-40bf-8af4-f5fdd46ab96f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.003224] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de5c1e5-56e2-4d31-b717-8b3662f23c2e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.040759] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062695ad-9a92-47af-8c00-fa3ab16daa4a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.049394] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c585e0e0-8fea-42c6-8934-ce267ccf7927 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.065262] env[61972]: DEBUG nova.compute.provider_tree [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.206594] env[61972]: INFO nova.scheduler.client.report [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Deleted allocations for instance 7a7c98db-6ed4-4908-adc8-53347d693dca [ 589.298338] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Releasing lock "refresh_cache-24073cc5-cccd-4a1b-87d6-a8a6458251f9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.298580] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 589.298760] env[61972]: DEBUG nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 589.299058] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 589.446855] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "2ba9f652-c274-4d79-84a2-ad1384c99b91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.447625] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "2ba9f652-c274-4d79-84a2-ad1384c99b91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.490209] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.569149] env[61972]: DEBUG nova.scheduler.client.report [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 589.715344] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592382d-a2db-4133-a572-1832e0c25c4b tempest-FloatingIPsAssociationTestJSON-2126815049 tempest-FloatingIPsAssociationTestJSON-2126815049-project-member] Lock "7a7c98db-6ed4-4908-adc8-53347d693dca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.808s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.995452] env[61972]: DEBUG nova.network.neutron [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.074432] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.065s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.075176] env[61972]: ERROR nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. 
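For context on the traceback that follows: the failure originates in the binding check at nova/network/neutron.py:294 (_ensure_no_port_binding_failure), which raises PortBindingFailed whenever Neutron reports a port whose binding could not be completed. Below is a minimal stand-alone sketch of that check; the exception class and the 'binding_failed' sentinel are stand-ins for nova.exception.PortBindingFailed and Neutron's 'binding:vif_type' value, not verbatim Nova source.

    # Sketch of the check behind the PortBindingFailed errors in this log.
    # Neutron sets 'binding:vif_type' to 'binding_failed' when no mechanism
    # driver could bind the port; Nova then refuses to continue the build.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs for '
                'more information.' % port_id)


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported a failed binding for this port dict."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        # Port dict shaped like the one Neutron returns for the failed port
        # logged above for instance 9e258f66-df7b-4acf-a066-ba66958a7861.
        failed_port = {'id': '47ef9d15-a729-4946-8b82-f4e24ae804d9',
                       'binding:vif_type': VIF_TYPE_BINDING_FAILED}
        try:
            ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)
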
[ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Traceback (most recent call last): [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self.driver.spawn(context, instance, image_meta, [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self._vmops.spawn(context, instance, image_meta, injected_files, [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] vm_ref = self.build_virtual_machine(instance, [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] vif_infos = vmwarevif.get_vif_info(self._session, [ 590.075176] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] for vif in network_info: [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] return self._sync_wrapper(fn, *args, **kwargs) [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self.wait() [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self[:] = self._gt.wait() [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] return self._exit_event.wait() [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] result = hub.switch() [ 590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
590.075512] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] return self.greenlet.switch() [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] result = function(*args, **kwargs) [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] return func(*args, **kwargs) [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] raise e [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] nwinfo = self.network_api.allocate_for_instance( [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] created_port_ids = self._update_ports_for_instance( [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] with excutils.save_and_reraise_exception(): [ 590.075963] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] self.force_reraise() [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] raise self.value [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] updated_port = self._update_port( [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] _ensure_no_port_binding_failure(port) [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] raise exception.PortBindingFailed(port_id=port['id']) [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] nova.exception.PortBindingFailed: Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. [ 590.076349] env[61972]: ERROR nova.compute.manager [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] [ 590.076659] env[61972]: DEBUG nova.compute.utils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 590.077575] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.372s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.077878] env[61972]: DEBUG nova.objects.instance [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lazy-loading 'resources' on Instance uuid 72d434a7-ea70-4594-971f-7eec8ebea153 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 590.080070] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Build of instance 9e258f66-df7b-4acf-a066-ba66958a7861 was re-scheduled: Binding failed for port 47ef9d15-a729-4946-8b82-f4e24ae804d9, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 590.080883] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 590.080883] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Acquiring lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.081197] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Acquired lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.081197] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 590.220297] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 590.498578] env[61972]: INFO nova.compute.manager [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] [instance: 24073cc5-cccd-4a1b-87d6-a8a6458251f9] Took 1.20 seconds to deallocate network for instance. [ 590.622067] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.699075] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.756546] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.053511] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d46d83-b388-4bca-9845-ccb21fdbf532 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.063603] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7444d2-2c8e-420d-9a15-3499da04f3dc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.095573] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c51c2cd-72d0-41aa-8e12-c1af444b9c95 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.103195] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd72a232-13e8-4a3a-a8d5-7598258d556d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.116692] env[61972]: DEBUG nova.compute.provider_tree [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.205636] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Releasing lock "refresh_cache-9e258f66-df7b-4acf-a066-ba66958a7861" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.206033] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 591.206113] env[61972]: DEBUG nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 591.206274] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 591.225036] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 591.538935] env[61972]: INFO nova.scheduler.client.report [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Deleted allocations for instance 24073cc5-cccd-4a1b-87d6-a8a6458251f9 [ 591.623446] env[61972]: DEBUG nova.scheduler.client.report [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 591.731259] env[61972]: DEBUG nova.network.neutron [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.054696] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cf755af-b83d-4d82-9b42-a953d87fcd2e tempest-ServerRescueTestJSONUnderV235-1539736891 tempest-ServerRescueTestJSONUnderV235-1539736891-project-member] Lock "24073cc5-cccd-4a1b-87d6-a8a6458251f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.094s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.127383] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.050s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.129709] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.994s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.166136] env[61972]: INFO nova.scheduler.client.report [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Deleted allocations for instance 72d434a7-ea70-4594-971f-7eec8ebea153 [ 592.237545] env[61972]: INFO nova.compute.manager [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] [instance: 9e258f66-df7b-4acf-a066-ba66958a7861] Took 1.03 seconds to deallocate network for instance. [ 592.557728] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 592.677609] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5379881d-c600-4fb6-a24d-e205c40e2430 tempest-ServerDiagnosticsV248Test-220293105 tempest-ServerDiagnosticsV248Test-220293105-project-member] Lock "72d434a7-ea70-4594-971f-7eec8ebea153" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.322s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.068016] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc15abb8-c044-47c4-8144-12ca396ef84f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.079657] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d59dbe-6577-4e95-bc29-23ada467b2bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.085436] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.112662] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea391fe6-aaa8-4499-a641-f613b8f301cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.120350] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9c2f8a-029b-4387-b66c-873b660bbeb9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.136224] env[61972]: DEBUG nova.compute.provider_tree [None 
req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.305692] env[61972]: INFO nova.scheduler.client.report [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Deleted allocations for instance 9e258f66-df7b-4acf-a066-ba66958a7861 [ 593.639102] env[61972]: DEBUG nova.scheduler.client.report [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 593.814909] env[61972]: DEBUG oslo_concurrency.lockutils [None req-66b28412-d8c5-4ced-a4b5-8c6e7ce6dc3e tempest-ServerDiagnosticsNegativeTest-312050512 tempest-ServerDiagnosticsNegativeTest-312050512-project-member] Lock "9e258f66-df7b-4acf-a066-ba66958a7861" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.616s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.144691] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.015s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.145361] env[61972]: ERROR nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. 
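The repeated "Acquiring lock ... / acquired ... waited / released ... held" lines above (for "compute_resources", "refresh_cache-&lt;uuid&gt;" and the per-instance _locked_do_build_and_run_instance locks) all come from oslo.concurrency's lock wrapper, which serializes callers on a named lock and logs the wait and hold durations. A minimal sketch of that pattern, with an illustrative function name and body rather than the real ResourceTracker code:

    # Sketch of the oslo.concurrency pattern producing the lockutils DEBUG
    # lines in this log. Requires oslo.concurrency (present in the service's
    # venv per the paths above); the function body is illustrative only.
    import time

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs while the in-process "compute_resources" lock is held,
        # serializing callers the same way instance_claim /
        # abort_instance_claim / update_usage are serialized above.
        time.sleep(0.1)
        return instance_uuid


    if __name__ == '__main__':
        print(claim_resources('50e4d9d3-a17b-4bb2-9816-bb44f269370e'))
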
[ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Traceback (most recent call last): [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self.driver.spawn(context, instance, image_meta, [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] vm_ref = self.build_virtual_machine(instance, [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] vif_infos = vmwarevif.get_vif_info(self._session, [ 594.145361] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] for vif in network_info: [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] return self._sync_wrapper(fn, *args, **kwargs) [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self.wait() [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self[:] = self._gt.wait() [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] return self._exit_event.wait() [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] result = hub.switch() [ 594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
594.145682] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] return self.greenlet.switch() [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] result = function(*args, **kwargs) [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] return func(*args, **kwargs) [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] raise e [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] nwinfo = self.network_api.allocate_for_instance( [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] created_port_ids = self._update_ports_for_instance( [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] with excutils.save_and_reraise_exception(): [ 594.146008] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] self.force_reraise() [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] raise self.value [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] updated_port = self._update_port( [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] _ensure_no_port_binding_failure(port) [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] raise exception.PortBindingFailed(port_id=port['id']) [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] nova.exception.PortBindingFailed: Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. [ 594.146307] env[61972]: ERROR nova.compute.manager [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] [ 594.146586] env[61972]: DEBUG nova.compute.utils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 594.148046] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Build of instance 107b6153-65ad-48e4-9810-113bfacdd3d6 was re-scheduled: Binding failed for port 7ef17580-83f5-4048-999d-d75acd9e4f06, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 594.148784] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 594.149093] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquiring lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.149194] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Acquired lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.149408] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.150394] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.399s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.320580] env[61972]: DEBUG nova.compute.manager [None 
req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 594.693027] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.840741] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.922704] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.102129] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cad035-4b07-46eb-b792-84c7877d3742 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.111478] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474a39d7-e868-40da-8827-87aa281a6705 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.140335] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec20f3e2-0ddf-4ed2-b92d-47cb57aef627 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.147566] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fca969-c388-4870-88af-97c701f077bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.160400] env[61972]: DEBUG nova.compute.provider_tree [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.425743] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Releasing lock "refresh_cache-107b6153-65ad-48e4-9810-113bfacdd3d6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.425743] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 
tempest-DeleteServersAdminTestJSON-1007445574-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 595.425866] env[61972]: DEBUG nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 595.425964] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 595.462128] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.664074] env[61972]: DEBUG nova.scheduler.client.report [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 595.965072] env[61972]: DEBUG nova.network.neutron [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.172670] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.022s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.175101] env[61972]: ERROR nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. 
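On the recurring "Inventory has not changed for provider ... based on inventory data: {...}" entries: the report client only pushes an inventory update to Placement when the freshly computed inventory differs from the copy cached in the provider tree, which is why these lines repeat with identical data and no PUT follows. A simplified stand-alone sketch of that decision, using the inventory values logged above; the real comparison lives in nova/scheduler/client/report.py (set_inventory_for_provider), and this version only models the "no change, skip the update" branch:

    # Simplified model of the "inventory unchanged, skip the Placement update"
    # decision; a plain dict comparison stands in for the provider-tree check.
    def inventory_changed(cached, new):
        return cached != new


    node_inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    if __name__ == '__main__':
        # dict(node_inventory) is a shallow copy standing in for the inventory
        # recomputed on the next periodic update; identical data means no call
        # to Placement, only the DEBUG line seen in this log.
        if not inventory_changed(node_inventory, dict(node_inventory)):
            print('Inventory has not changed for provider '
                  '2f34b92c-91e8-4983-ae34-7426fcec3157')
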
[ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Traceback (most recent call last): [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self.driver.spawn(context, instance, image_meta, [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] vm_ref = self.build_virtual_machine(instance, [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] vif_infos = vmwarevif.get_vif_info(self._session, [ 596.175101] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] for vif in network_info: [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] return self._sync_wrapper(fn, *args, **kwargs) [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self.wait() [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self[:] = self._gt.wait() [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] return self._exit_event.wait() [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] result = hub.switch() [ 596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
596.175382] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] return self.greenlet.switch() [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] result = function(*args, **kwargs) [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] return func(*args, **kwargs) [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] raise e [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] nwinfo = self.network_api.allocate_for_instance( [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] created_port_ids = self._update_ports_for_instance( [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] with excutils.save_and_reraise_exception(): [ 596.175689] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] self.force_reraise() [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] raise self.value [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] updated_port = self._update_port( [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] _ensure_no_port_binding_failure(port) [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] raise exception.PortBindingFailed(port_id=port['id']) [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] nova.exception.PortBindingFailed: Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. [ 596.175976] env[61972]: ERROR nova.compute.manager [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] [ 596.179225] env[61972]: DEBUG nova.compute.utils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 596.179225] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.945s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.179225] env[61972]: INFO nova.compute.claims [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.182453] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Build of instance 2795b001-aaf2-4886-bba7-bd764c29638c was re-scheduled: Binding failed for port 4f050603-36be-4dc6-902d-a372ebcf824a, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 596.183526] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 596.183870] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Acquiring lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.184152] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Acquired lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.184423] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.470770] env[61972]: INFO nova.compute.manager [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] [instance: 107b6153-65ad-48e4-9810-113bfacdd3d6] Took 1.04 seconds to deallocate network for instance. [ 596.712262] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.789953] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.292296] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Releasing lock "refresh_cache-2795b001-aaf2-4886-bba7-bd764c29638c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.292604] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 597.292604] env[61972]: DEBUG nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 597.292791] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 597.341792] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.512447] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.512700] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.522023] env[61972]: INFO nova.scheduler.client.report [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Deleted allocations for instance 107b6153-65ad-48e4-9810-113bfacdd3d6 [ 597.619170] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e53f141-d449-47ad-bb81-1f0fd985202f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.628016] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a75f2e5-c2a0-4ca3-b38f-a1db7f1718f2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.669422] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76a809b-b665-47a6-92d6-9361884e1974 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.682993] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe55245-f906-4807-8ece-fb4e2dfb0f3e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.701167] env[61972]: DEBUG nova.compute.provider_tree [None 
req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.846495] env[61972]: DEBUG nova.network.neutron [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.030366] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d164eb4c-bcda-4791-9824-03b505425ad2 tempest-DeleteServersAdminTestJSON-1007445574 tempest-DeleteServersAdminTestJSON-1007445574-project-member] Lock "107b6153-65ad-48e4-9810-113bfacdd3d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.771s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.205419] env[61972]: DEBUG nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 598.352539] env[61972]: INFO nova.compute.manager [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] [instance: 2795b001-aaf2-4886-bba7-bd764c29638c] Took 1.06 seconds to deallocate network for instance. [ 598.533234] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 598.713017] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.535s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.713017] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 598.714124] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.336s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.083806] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.213697] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Acquiring lock "e0d51c99-1916-4d66-a141-dfa5d4357174" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.214023] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Lock "e0d51c99-1916-4d66-a141-dfa5d4357174" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.219720] env[61972]: DEBUG nova.compute.utils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 599.226721] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 599.227143] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 599.356900] env[61972]: DEBUG nova.policy [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77f085d6aafe41328b41f3c0ddc3079e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb754ac92c5648ed888fdf68ccd14ced', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 599.398299] env[61972]: INFO nova.scheduler.client.report [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Deleted allocations for instance 2795b001-aaf2-4886-bba7-bd764c29638c [ 599.695315] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1386e82-0437-4991-ad84-0859832ccb84 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.703068] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6a3c07-4a33-48ab-b853-602530210276 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.737035] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 599.742411] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca6155c-eeb6-4183-b916-fe56a8278916 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.754072] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7a4f24-d9f9-41a7-9e3c-b7585102c220 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.769126] env[61972]: DEBUG nova.compute.provider_tree [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.917671] env[61972]: DEBUG oslo_concurrency.lockutils [None req-87b4f056-7a82-45fe-aad7-263a9533e9f2 tempest-ServerDiagnosticsTest-844285827 tempest-ServerDiagnosticsTest-844285827-project-member] Lock "2795b001-aaf2-4886-bba7-bd764c29638c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.313s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.188413] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "a978943b-afd3-44f4-b6c1-5a72dda8ca35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.188598] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "a978943b-afd3-44f4-b6c1-5a72dda8ca35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.274643] env[61972]: DEBUG nova.scheduler.client.report [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 600.348204] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Successfully created port: 10839b72-0e8a-4422-926c-68083c84bf75 {{(pid=61972) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.420556] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 600.759442] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 600.782442] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.068s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.784834] env[61972]: ERROR nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Traceback (most recent call last): [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self.driver.spawn(context, instance, image_meta, [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] vm_ref = self.build_virtual_machine(instance, [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.784834] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] for vif in network_info: [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 
6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] return self._sync_wrapper(fn, *args, **kwargs) [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self.wait() [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self[:] = self._gt.wait() [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] return self._exit_event.wait() [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] result = hub.switch() [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 600.785127] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] return self.greenlet.switch() [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] result = function(*args, **kwargs) [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] return func(*args, **kwargs) [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] raise e [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] nwinfo = self.network_api.allocate_for_instance( [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] created_port_ids = self._update_ports_for_instance( [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File 
"/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] with excutils.save_and_reraise_exception(): [ 600.785423] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] self.force_reraise() [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] raise self.value [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] updated_port = self._update_port( [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] _ensure_no_port_binding_failure(port) [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] raise exception.PortBindingFailed(port_id=port['id']) [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] nova.exception.PortBindingFailed: Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. [ 600.785745] env[61972]: ERROR nova.compute.manager [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] [ 600.785978] env[61972]: DEBUG nova.compute.utils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. 
{{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.788372] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.968s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.792527] env[61972]: INFO nova.compute.claims [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.797812] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Build of instance 6858305a-6ab4-401d-ad1f-e6d21117d9e3 was re-scheduled: Binding failed for port 6ddf3917-6f4b-4b0b-9b61-19ef9c485793, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 600.798771] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 600.798901] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Acquiring lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.799055] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Acquired lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.799211] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.802037] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.805324] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.805324] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.805324] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.805324] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.805324] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.805723] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.805723] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 600.805723] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.805723] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 600.805723] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.807336] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa023fe-b02e-480b-87b4-551e04c18ee6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.814739] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f02f57-83bb-4b9e-9aa9-a07a506c51bd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.953560] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.344352] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.532223] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.994104] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. 
[ 601.994104] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 601.994104] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 601.994104] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 601.994104] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.994104] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 601.994104] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.994104] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 601.994104] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.994104] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 601.994104] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.994104] env[61972]: ERROR nova.compute.manager raise self.value [ 601.994104] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.994104] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 601.994104] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.994104] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 601.994535] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.994535] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 601.994535] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. 
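The PortBindingFailed tracebacks above (ports 4f050603, 6ddf3917 and 10839b72) all terminate in the same check: after asking Neutron to update a port, Nova inspects the returned binding and raises if the ML2 layer could not bind it, and _allocate_network_async then re-raises so the build fails and the instance is re-scheduled. The following is a minimal, self-contained sketch of that check, not the actual Nova implementation; the names are illustrative and it only assumes that Neutron marks an unbound port with 'binding:vif_type' == 'binding_failed'.

class PortBindingFailed(Exception):
    # Raised when Neutron reports a failed binding for a port
    # (mirrors the error text seen in the log above).
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port: dict) -> None:
    # Neutron flags ports its mechanism drivers could not bind with the
    # sentinel vif_type 'binding_failed'; treat that as a fatal error.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port["id"])

# Usage example with a port Neutron failed to bind (UUID taken from the log above):
port = {"id": "10839b72-0e8a-4422-926c-68083c84bf75",
        "binding:vif_type": "binding_failed"}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 10839b72-..., please check neutron logs ...

When this check fires during asynchronous network allocation, the exception propagates out of the greenthread, the spawn on the hypervisor aborts, and the claim is rolled back — the sequence recorded for instance dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd in the entries that follow.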
[ 601.994535] env[61972]: ERROR nova.compute.manager [ 601.994739] env[61972]: Traceback (most recent call last): [ 601.994817] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 601.994817] env[61972]: listener.cb(fileno) [ 601.994884] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.994884] env[61972]: result = function(*args, **kwargs) [ 601.994943] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 601.994943] env[61972]: return func(*args, **kwargs) [ 601.995015] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 601.995015] env[61972]: raise e [ 601.995070] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 601.995070] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 601.995132] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.995132] env[61972]: created_port_ids = self._update_ports_for_instance( [ 601.996027] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.996027] env[61972]: with excutils.save_and_reraise_exception(): [ 601.996027] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.996027] env[61972]: self.force_reraise() [ 601.996027] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.996027] env[61972]: raise self.value [ 601.996027] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.996027] env[61972]: updated_port = self._update_port( [ 601.996027] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.996027] env[61972]: _ensure_no_port_binding_failure(port) [ 601.996027] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.996027] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 601.996027] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. [ 601.996027] env[61972]: Removing descriptor: 19 [ 601.996458] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. 
[ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Traceback (most recent call last): [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] yield resources [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self.driver.spawn(context, instance, image_meta, [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] vm_ref = self.build_virtual_machine(instance, [ 601.996458] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] vif_infos = vmwarevif.get_vif_info(self._session, [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] for vif in network_info: [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] return self._sync_wrapper(fn, *args, **kwargs) [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self.wait() [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self[:] = self._gt.wait() [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] return self._exit_event.wait() [ 601.996749] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 601.996749] env[61972]: ERROR 
nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] result = hub.switch() [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] return self.greenlet.switch() [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] result = function(*args, **kwargs) [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] return func(*args, **kwargs) [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] raise e [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] nwinfo = self.network_api.allocate_for_instance( [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] created_port_ids = self._update_ports_for_instance( [ 601.997066] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] with excutils.save_and_reraise_exception(): [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self.force_reraise() [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] raise self.value [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] updated_port = self._update_port( [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.997398] 
env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] _ensure_no_port_binding_failure(port) [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] raise exception.PortBindingFailed(port_id=port['id']) [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] nova.exception.PortBindingFailed: Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. [ 601.997398] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] [ 601.997832] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Terminating instance [ 602.037403] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Releasing lock "refresh_cache-6858305a-6ab4-401d-ad1f-e6d21117d9e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.037934] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 602.038226] env[61972]: DEBUG nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 602.039091] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 602.059591] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.224355] env[61972]: DEBUG nova.compute.manager [req-fee426e6-f67f-49f0-83f4-3ae719b096a6 req-0eed2d94-04ed-45da-b2aa-ba567b47f984 service nova] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Received event network-changed-10839b72-0e8a-4422-926c-68083c84bf75 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 602.224631] env[61972]: DEBUG nova.compute.manager [req-fee426e6-f67f-49f0-83f4-3ae719b096a6 req-0eed2d94-04ed-45da-b2aa-ba567b47f984 service nova] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Refreshing instance network info cache due to event network-changed-10839b72-0e8a-4422-926c-68083c84bf75. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 602.224704] env[61972]: DEBUG oslo_concurrency.lockutils [req-fee426e6-f67f-49f0-83f4-3ae719b096a6 req-0eed2d94-04ed-45da-b2aa-ba567b47f984 service nova] Acquiring lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.224836] env[61972]: DEBUG oslo_concurrency.lockutils [req-fee426e6-f67f-49f0-83f4-3ae719b096a6 req-0eed2d94-04ed-45da-b2aa-ba567b47f984 service nova] Acquired lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.224983] env[61972]: DEBUG nova.network.neutron [req-fee426e6-f67f-49f0-83f4-3ae719b096a6 req-0eed2d94-04ed-45da-b2aa-ba567b47f984 service nova] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Refreshing network info cache for port 10839b72-0e8a-4422-926c-68083c84bf75 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 602.287273] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6971fc-7f1e-460d-9576-042c3530645a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.296656] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b59d56-f6bc-45c8-a14e-ff57265e8107 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.347786] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12a4527-4f0d-420c-b09c-8c33ac9c8464 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.353904] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Acquiring lock "e0735ee2-0a9d-4291-8465-b644816bf8e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.355544] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Lock "e0735ee2-0a9d-4291-8465-b644816bf8e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.359519] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ee0d37-844a-40bb-9f0b-869c6b4fb14d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.374513] env[61972]: DEBUG nova.compute.provider_tree [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.414805] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "56488ac6-c94b-4b40-9cad-b0c36a3d293e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.415066] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "56488ac6-c94b-4b40-9cad-b0c36a3d293e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.504965] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.562905] env[61972]: DEBUG nova.network.neutron [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.757069] env[61972]: DEBUG nova.network.neutron [req-fee426e6-f67f-49f0-83f4-3ae719b096a6 req-0eed2d94-04ed-45da-b2aa-ba567b47f984 service nova] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.852235] env[61972]: DEBUG nova.network.neutron [req-fee426e6-f67f-49f0-83f4-3ae719b096a6 req-0eed2d94-04ed-45da-b2aa-ba567b47f984 service nova] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.878372] env[61972]: DEBUG nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 603.067206] env[61972]: INFO nova.compute.manager [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] [instance: 6858305a-6ab4-401d-ad1f-e6d21117d9e3] Took 1.03 seconds to deallocate network for instance. [ 603.355504] env[61972]: DEBUG oslo_concurrency.lockutils [req-fee426e6-f67f-49f0-83f4-3ae719b096a6 req-0eed2d94-04ed-45da-b2aa-ba567b47f984 service nova] Releasing lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.355907] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquired lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.356260] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.383833] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.597s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.384418] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 603.388570] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.702s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.491789] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "2725d6ed-89d9-479f-b6ee-d16523e0abab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.492049] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "2725d6ed-89d9-479f-b6ee-d16523e0abab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.774269] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Acquiring lock "2b0039dd-1219-465d-beb8-0262e0e40029" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.774269] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Lock "2b0039dd-1219-465d-beb8-0262e0e40029" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.882343] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.892695] env[61972]: DEBUG nova.compute.utils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.896546] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 603.896814] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.965483] env[61972]: DEBUG nova.policy [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77f085d6aafe41328b41f3c0ddc3079e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb754ac92c5648ed888fdf68ccd14ced', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 604.052826] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.110185] env[61972]: INFO nova.scheduler.client.report [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Deleted allocations for instance 6858305a-6ab4-401d-ad1f-e6d21117d9e3 [ 604.331903] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3e4768-89e1-4bb8-8df6-7c2ed63d0a8c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.339369] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1a9b27-9a3c-45d3-b80f-b4d9bdf3cb83 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.376166] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed02714-34d4-4b16-9a24-55835eb7831e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.383534] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9426744e-d7eb-47fb-ba47-ccc74dcc0329 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.396734] env[61972]: DEBUG nova.compute.provider_tree [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.399781] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 
tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 604.444199] env[61972]: DEBUG nova.compute.manager [req-e0f13d1b-4573-44ee-9e94-ec26e331f16d req-6cb0f1a7-7a84-479c-8d2a-fec3d7ab6ba2 service nova] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Received event network-vif-deleted-10839b72-0e8a-4422-926c-68083c84bf75 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 604.497530] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Successfully created port: 2802b852-ca47-4063-87c9-cfe535e0cc35 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.555989] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Releasing lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.556479] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 604.556674] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 604.556974] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f13e51a1-96ac-43e4-be8b-ab352418a7c3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.567302] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e452efd6-4124-46a1-b7c0-edb11866ab7f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.594726] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd could not be found. 
[ 604.595070] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 604.595170] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 604.595409] env[61972]: DEBUG oslo.service.loopingcall [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 604.595629] env[61972]: DEBUG nova.compute.manager [-] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 604.595722] env[61972]: DEBUG nova.network.neutron [-] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 604.621817] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2d33f3d-aaaf-416f-ac2c-05a6de35287c tempest-ImagesOneServerNegativeTestJSON-562931961 tempest-ImagesOneServerNegativeTestJSON-562931961-project-member] Lock "6858305a-6ab4-401d-ad1f-e6d21117d9e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.759s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.634700] env[61972]: DEBUG nova.network.neutron [-] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.900122] env[61972]: DEBUG nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 605.125561] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 605.135379] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Acquiring lock "7801858d-bc2a-466e-a6f2-a8c6b6ff4705" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.135615] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Lock "7801858d-bc2a-466e-a6f2-a8c6b6ff4705" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.137562] env[61972]: DEBUG nova.network.neutron [-] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.411785] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.024s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.412470] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. 
[ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Traceback (most recent call last): [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self.driver.spawn(context, instance, image_meta, [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] vm_ref = self.build_virtual_machine(instance, [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.412470] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] for vif in network_info: [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] return self._sync_wrapper(fn, *args, **kwargs) [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self.wait() [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self[:] = self._gt.wait() [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] return self._exit_event.wait() [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] result = hub.switch() [ 605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
605.412771] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] return self.greenlet.switch() [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] result = function(*args, **kwargs) [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] return func(*args, **kwargs) [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] raise e [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] nwinfo = self.network_api.allocate_for_instance( [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] created_port_ids = self._update_ports_for_instance( [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] with excutils.save_and_reraise_exception(): [ 605.413133] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] self.force_reraise() [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] raise self.value [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] updated_port = self._update_port( [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] _ensure_no_port_binding_failure(port) [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] raise exception.PortBindingFailed(port_id=port['id']) [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] nova.exception.PortBindingFailed: Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. [ 605.413412] env[61972]: ERROR nova.compute.manager [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] [ 605.413709] env[61972]: DEBUG nova.compute.utils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 605.415510] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 605.417829] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.662s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.419670] env[61972]: INFO nova.compute.claims [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.423820] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Build of instance ba985ad1-390d-4a2e-ad96-c273231f8549 was re-scheduled: Binding failed for port 409c65e1-161b-4663-9c58-b262e71200df, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 605.423820] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 605.423820] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.423820] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquired lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.424055] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.456949] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.457337] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.458236] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.458660] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Flavor pref 0:0:0 
{{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.459026] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.459355] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.461026] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.461026] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.461026] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.461026] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.461026] env[61972]: DEBUG nova.virt.hardware [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.461695] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483796d3-d259-47fe-9054-f2f2f8e470df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.471215] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c559fda-166c-413f-8c8e-771facdb677a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.642624] env[61972]: INFO nova.compute.manager [-] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Took 1.05 seconds to deallocate network for instance. 
[ 605.650136] env[61972]: DEBUG nova.compute.claims [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 605.650270] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.661059] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.952255] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.014460] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information. 
[ 606.014460] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 606.014460] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.014460] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 606.014460] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.014460] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 606.014460] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.014460] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 606.014460] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.014460] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 606.014460] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.014460] env[61972]: ERROR nova.compute.manager raise self.value [ 606.014460] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.014460] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 606.014460] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.014460] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 606.015402] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.015402] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 606.015402] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information. 
[ 606.015402] env[61972]: ERROR nova.compute.manager [ 606.015402] env[61972]: Traceback (most recent call last): [ 606.015402] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 606.015402] env[61972]: listener.cb(fileno) [ 606.015402] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.015402] env[61972]: result = function(*args, **kwargs) [ 606.015402] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 606.015402] env[61972]: return func(*args, **kwargs) [ 606.015402] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 606.015402] env[61972]: raise e [ 606.015402] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.015402] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 606.015402] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.015402] env[61972]: created_port_ids = self._update_ports_for_instance( [ 606.015402] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.015402] env[61972]: with excutils.save_and_reraise_exception(): [ 606.015402] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.015402] env[61972]: self.force_reraise() [ 606.015402] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.015402] env[61972]: raise self.value [ 606.015402] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.015402] env[61972]: updated_port = self._update_port( [ 606.015402] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.015402] env[61972]: _ensure_no_port_binding_failure(port) [ 606.015402] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.015402] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 606.016405] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information. [ 606.016405] env[61972]: Removing descriptor: 21 [ 606.016405] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information. 
[ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Traceback (most recent call last): [ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] yield resources [ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self.driver.spawn(context, instance, image_meta, [ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 606.016405] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] vm_ref = self.build_virtual_machine(instance, [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] vif_infos = vmwarevif.get_vif_info(self._session, [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] for vif in network_info: [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] return self._sync_wrapper(fn, *args, **kwargs) [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self.wait() [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self[:] = self._gt.wait() [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] return self._exit_event.wait() [ 606.016823] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 606.018191] env[61972]: ERROR 
nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] result = hub.switch() [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] return self.greenlet.switch() [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] result = function(*args, **kwargs) [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] return func(*args, **kwargs) [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] raise e [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] nwinfo = self.network_api.allocate_for_instance( [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.018191] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] created_port_ids = self._update_ports_for_instance( [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] with excutils.save_and_reraise_exception(): [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self.force_reraise() [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] raise self.value [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] updated_port = self._update_port( [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.019078] 
env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] _ensure_no_port_binding_failure(port) [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.019078] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] raise exception.PortBindingFailed(port_id=port['id']) [ 606.019491] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] nova.exception.PortBindingFailed: Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information. [ 606.019491] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] [ 606.019491] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Terminating instance [ 606.063469] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.482581] env[61972]: DEBUG nova.compute.manager [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Received event network-changed-2802b852-ca47-4063-87c9-cfe535e0cc35 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 606.482790] env[61972]: DEBUG nova.compute.manager [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Refreshing instance network info cache due to event network-changed-2802b852-ca47-4063-87c9-cfe535e0cc35. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 606.483016] env[61972]: DEBUG oslo_concurrency.lockutils [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] Acquiring lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.483195] env[61972]: DEBUG oslo_concurrency.lockutils [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] Acquired lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.483347] env[61972]: DEBUG nova.network.neutron [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Refreshing network info cache for port 2802b852-ca47-4063-87c9-cfe535e0cc35 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 606.522213] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.566905] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Releasing lock "refresh_cache-ba985ad1-390d-4a2e-ad96-c273231f8549" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.567143] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 606.567321] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 606.568821] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.592884] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.000236] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3cad0f-f58d-41af-9f60-d2bad2594ec9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.008376] env[61972]: DEBUG nova.network.neutron [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.011448] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b474416-082b-483c-8c0b-8a3b29a85d3b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.690170] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.694783] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453dbc02-93e2-470e-94e4-8690f59cc633 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.705186] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ee54e7-7f8d-4e48-a6d6-ca762b1817e8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.718753] env[61972]: DEBUG nova.compute.provider_tree [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.766872] env[61972]: DEBUG nova.network.neutron [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.195668] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: ba985ad1-390d-4a2e-ad96-c273231f8549] Took 1.63 seconds to deallocate network for instance. 
[ 608.222224] env[61972]: DEBUG nova.scheduler.client.report [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 608.269686] env[61972]: DEBUG oslo_concurrency.lockutils [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] Releasing lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.269987] env[61972]: DEBUG nova.compute.manager [req-1370aae6-e7a9-4b71-af80-c8bc086b7b4e req-a1f1a3ac-333d-45cd-9b81-bf932effb38e service nova] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Received event network-vif-deleted-2802b852-ca47-4063-87c9-cfe535e0cc35 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 608.270350] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquired lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.270514] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.727356] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.309s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.727835] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 608.730353] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.974s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.732530] env[61972]: INFO nova.compute.claims [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.803974] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.979105] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.236722] env[61972]: DEBUG nova.compute.utils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 609.243017] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 609.243017] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 609.247554] env[61972]: INFO nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Deleted allocations for instance ba985ad1-390d-4a2e-ad96-c273231f8549 [ 609.294094] env[61972]: DEBUG nova.policy [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a05628d5d2d54088833dc99dd43fcfa8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a69b02d028a4a069ad5d7f80ccb527f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 609.482980] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Releasing lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.483137] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 609.483351] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.487029] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bd61a73-1ff6-4c9b-b6bd-78542bfdcb86 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.492801] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d33a1b8-478f-4d65-bb9d-f0ce4d650a6e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.515041] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56b1ea80-3109-4212-959b-0e5fb2fc66d3 could not be found. 
[ 609.515284] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 609.515461] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 609.515696] env[61972]: DEBUG oslo.service.loopingcall [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.515915] env[61972]: DEBUG nova.compute.manager [-] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 609.516031] env[61972]: DEBUG nova.network.neutron [-] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 609.544781] env[61972]: DEBUG nova.network.neutron [-] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.674561] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Successfully created port: a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.745190] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 609.757920] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "ba985ad1-390d-4a2e-ad96-c273231f8549" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.478s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.048396] env[61972]: DEBUG nova.network.neutron [-] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.215813] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541e006c-204e-4aca-be1a-602bae02f6a6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.223295] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397d1ba6-60f7-4a65-9d61-085a271e48b3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.260019] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8eda7c-fd65-4e3f-82f6-08dcd6a08c4e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.262830] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 610.271795] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bc374e-1795-4535-85c9-8f0208e41a1c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.289743] env[61972]: DEBUG nova.compute.provider_tree [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.550968] env[61972]: INFO nova.compute.manager [-] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Took 1.03 seconds to deallocate network for instance. 
[ 610.553772] env[61972]: DEBUG nova.compute.claims [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.553949] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.764336] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 610.796218] env[61972]: DEBUG nova.scheduler.client.report [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 610.808413] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.814988] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 610.815297] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 
tempest-ImagesNegativeTestJSON-2027549822-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 610.815406] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.815966] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 610.815966] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.816305] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 610.816896] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 610.816896] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 610.816896] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 610.816997] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 610.817193] env[61972]: DEBUG nova.virt.hardware [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 610.818073] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae141295-6f9b-4bdc-9705-90da0fa7a25e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.830007] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bfb443-c4d5-42a0-ae66-48e161cffb93 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.311153] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.312619] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 611.315264] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.230s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.317054] env[61972]: INFO nova.compute.claims [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.814884] env[61972]: DEBUG nova.compute.manager [req-21d5aeeb-80f2-4a71-948a-4cdd0c48e80f req-ec6e5fba-f161-4690-a0f5-86e26090ece9 service nova] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Received event network-changed-a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 611.815116] env[61972]: DEBUG nova.compute.manager [req-21d5aeeb-80f2-4a71-948a-4cdd0c48e80f req-ec6e5fba-f161-4690-a0f5-86e26090ece9 service nova] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Refreshing instance network info cache due to event network-changed-a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 611.815328] env[61972]: DEBUG oslo_concurrency.lockutils [req-21d5aeeb-80f2-4a71-948a-4cdd0c48e80f req-ec6e5fba-f161-4690-a0f5-86e26090ece9 service nova] Acquiring lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.815467] env[61972]: DEBUG oslo_concurrency.lockutils [req-21d5aeeb-80f2-4a71-948a-4cdd0c48e80f req-ec6e5fba-f161-4690-a0f5-86e26090ece9 service nova] Acquired lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.815626] env[61972]: DEBUG nova.network.neutron [req-21d5aeeb-80f2-4a71-948a-4cdd0c48e80f req-ec6e5fba-f161-4690-a0f5-86e26090ece9 service nova] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Refreshing network info cache for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.821450] env[61972]: DEBUG nova.compute.utils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 611.826701] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 611.828977] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.940497] env[61972]: DEBUG nova.policy [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c90c43e7f554d259c9f8a65d3e797dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '871c1d0ed74c4c46861d512087263041', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 612.162910] env[61972]: ERROR nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. 
[ 612.162910] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 612.162910] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 612.162910] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 612.162910] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.162910] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 612.162910] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.162910] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 612.162910] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.162910] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 612.162910] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.162910] env[61972]: ERROR nova.compute.manager raise self.value [ 612.162910] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.162910] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 612.162910] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.162910] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 612.163350] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.163350] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 612.163350] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. 
[ 612.163350] env[61972]: ERROR nova.compute.manager [ 612.163350] env[61972]: Traceback (most recent call last): [ 612.163350] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 612.163350] env[61972]: listener.cb(fileno) [ 612.163350] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.163350] env[61972]: result = function(*args, **kwargs) [ 612.163350] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.163350] env[61972]: return func(*args, **kwargs) [ 612.163350] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 612.163350] env[61972]: raise e [ 612.163350] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 612.163350] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 612.163350] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.163350] env[61972]: created_port_ids = self._update_ports_for_instance( [ 612.163350] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.163350] env[61972]: with excutils.save_and_reraise_exception(): [ 612.163350] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.163350] env[61972]: self.force_reraise() [ 612.163350] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.163350] env[61972]: raise self.value [ 612.163350] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.163350] env[61972]: updated_port = self._update_port( [ 612.163350] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.163350] env[61972]: _ensure_no_port_binding_failure(port) [ 612.163350] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.163350] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 612.163998] env[61972]: nova.exception.PortBindingFailed: Binding failed for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. [ 612.163998] env[61972]: Removing descriptor: 19 [ 612.163998] env[61972]: ERROR nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. 
[ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Traceback (most recent call last): [ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] yield resources [ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self.driver.spawn(context, instance, image_meta, [ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.163998] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] vm_ref = self.build_virtual_machine(instance, [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] for vif in network_info: [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] return self._sync_wrapper(fn, *args, **kwargs) [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self.wait() [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self[:] = self._gt.wait() [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] return self._exit_event.wait() [ 612.164629] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.164985] env[61972]: ERROR 
nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] result = hub.switch() [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] return self.greenlet.switch() [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] result = function(*args, **kwargs) [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] return func(*args, **kwargs) [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] raise e [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] nwinfo = self.network_api.allocate_for_instance( [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.164985] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] created_port_ids = self._update_ports_for_instance( [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] with excutils.save_and_reraise_exception(): [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self.force_reraise() [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] raise self.value [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] updated_port = self._update_port( [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.165339] 
env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] _ensure_no_port_binding_failure(port) [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.165339] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] raise exception.PortBindingFailed(port_id=port['id']) [ 612.165634] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] nova.exception.PortBindingFailed: Binding failed for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. [ 612.165634] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] [ 612.165634] env[61972]: INFO nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Terminating instance [ 612.334489] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 612.341021] env[61972]: DEBUG nova.network.neutron [req-21d5aeeb-80f2-4a71-948a-4cdd0c48e80f req-ec6e5fba-f161-4690-a0f5-86e26090ece9 service nova] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.498222] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Successfully created port: 61075218-709a-43a5-a0f4-4f4c50ec0251 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.668582] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Acquiring lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.702052] env[61972]: DEBUG nova.network.neutron [req-21d5aeeb-80f2-4a71-948a-4cdd0c48e80f req-ec6e5fba-f161-4690-a0f5-86e26090ece9 service nova] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.820220] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55016c52-c909-4d3a-b951-006ac6681198 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.833550] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c8fe0a-387d-4024-bba0-560be4d1e37d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.870107] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615b6653-a4e5-4ba4-8e60-1c9480eaf64d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.876678] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee86da6-93e7-4c2f-b89d-bf9c522e88da {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.896489] env[61972]: DEBUG nova.compute.provider_tree [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.210738] env[61972]: DEBUG oslo_concurrency.lockutils [req-21d5aeeb-80f2-4a71-948a-4cdd0c48e80f req-ec6e5fba-f161-4690-a0f5-86e26090ece9 service nova] Releasing lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.211212] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Acquired lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.211397] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 613.371041] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 613.400298] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 613.400548] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 613.400743] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.400925] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 613.401090] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.401242] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 613.401450] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 613.401649] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 613.401831] env[61972]: DEBUG nova.virt.hardware [None 
req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 613.402014] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 613.402260] env[61972]: DEBUG nova.virt.hardware [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.403079] env[61972]: DEBUG nova.scheduler.client.report [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 613.409279] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eac978b-9fa8-4520-9f9f-b2d80f9a0b94 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.421520] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4414e35-40fc-4615-a659-b2b04659ab9d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.752584] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.824561] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Acquiring lock "dc5ef08a-8692-4274-84df-7c2923099249" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.824561] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Lock "dc5ef08a-8692-4274-84df-7c2923099249" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.912381] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.912381] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 613.914270] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.074s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.918381] env[61972]: INFO nova.compute.claims [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.932261] env[61972]: DEBUG nova.compute.manager [req-b00da235-379c-4889-a8ac-621f4f239c2f req-e81e62f4-42cc-4213-b1aa-206bb9c59f0c service nova] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Received event network-vif-deleted-a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 613.952526] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.244877] env[61972]: ERROR nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 
tempest-SecurityGroupsTestJSON-580693667-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. [ 614.244877] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 614.244877] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 614.244877] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 614.244877] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 614.244877] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 614.244877] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 614.244877] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 614.244877] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.244877] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 614.244877] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.244877] env[61972]: ERROR nova.compute.manager raise self.value [ 614.244877] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 614.244877] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 614.244877] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.244877] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 614.245297] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 614.245297] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 614.245297] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. 
[ 614.245297] env[61972]: ERROR nova.compute.manager [ 614.245297] env[61972]: Traceback (most recent call last): [ 614.245297] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 614.245297] env[61972]: listener.cb(fileno) [ 614.245297] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 614.245297] env[61972]: result = function(*args, **kwargs) [ 614.245297] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 614.245297] env[61972]: return func(*args, **kwargs) [ 614.245297] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 614.245297] env[61972]: raise e [ 614.245297] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 614.245297] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 614.245297] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 614.245297] env[61972]: created_port_ids = self._update_ports_for_instance( [ 614.245297] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 614.245297] env[61972]: with excutils.save_and_reraise_exception(): [ 614.245297] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.245297] env[61972]: self.force_reraise() [ 614.245297] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.245297] env[61972]: raise self.value [ 614.245297] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 614.245297] env[61972]: updated_port = self._update_port( [ 614.245297] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.245297] env[61972]: _ensure_no_port_binding_failure(port) [ 614.245297] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 614.245297] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 614.245949] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. [ 614.245949] env[61972]: Removing descriptor: 21 [ 614.246373] env[61972]: ERROR nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. 
[ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Traceback (most recent call last): [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] yield resources [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self.driver.spawn(context, instance, image_meta, [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] vm_ref = self.build_virtual_machine(instance, [ 614.246373] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] for vif in network_info: [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] return self._sync_wrapper(fn, *args, **kwargs) [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self.wait() [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self[:] = self._gt.wait() [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] return self._exit_event.wait() [ 614.246734] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 614.246734] env[61972]: ERROR 
nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] result = hub.switch() [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] return self.greenlet.switch() [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] result = function(*args, **kwargs) [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] return func(*args, **kwargs) [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] raise e [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] nwinfo = self.network_api.allocate_for_instance( [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] created_port_ids = self._update_ports_for_instance( [ 614.247103] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] with excutils.save_and_reraise_exception(): [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self.force_reraise() [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] raise self.value [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] updated_port = self._update_port( [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.247570] 
env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] _ensure_no_port_binding_failure(port) [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] raise exception.PortBindingFailed(port_id=port['id']) [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] nova.exception.PortBindingFailed: Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. [ 614.247570] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] [ 614.247857] env[61972]: INFO nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Terminating instance [ 614.426307] env[61972]: DEBUG nova.compute.utils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 614.431027] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 614.431027] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 614.454877] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Releasing lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.456227] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 614.456697] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.459025] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19016100-16be-4a21-81a1-9fd6ecfd4a20 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.468307] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fb47aa-c46e-41f3-a0c2-481e3bbdcbbc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.493942] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 489fc6c6-c9a5-40a8-81a4-7677f55743fe could not be found. [ 614.494158] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.494332] env[61972]: INFO nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Took 0.04 seconds to destroy the instance on the hypervisor. [ 614.494605] env[61972]: DEBUG oslo.service.loopingcall [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.495983] env[61972]: DEBUG nova.policy [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f8c590d73094f749537e31f4d03229a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09219842acaa4b948356f5a1d9ed4ccf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 614.497350] env[61972]: DEBUG nova.compute.manager [-] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 614.497439] env[61972]: DEBUG nova.network.neutron [-] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.561198] env[61972]: DEBUG nova.network.neutron [-] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.752435] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.752675] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquired lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.753056] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 614.908791] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Successfully created port: d59aa4aa-ae41-40dd-a250-43f0ba448dc2 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.931218] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 615.066412] env[61972]: DEBUG nova.network.neutron [-] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.282041] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.427612] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114c3f0d-dea5-47fa-8d57-c7baa8724bab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.430889] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.436961] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f5121a-238d-4cbc-9752-8180e0fadad4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.471408] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aa573d-10ce-446f-9c5d-73602003f7d5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.477474] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed0c403-f948-4838-86c1-18db7ffd1095 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.491228] env[61972]: DEBUG nova.compute.provider_tree [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.575474] env[61972]: INFO nova.compute.manager [-] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Took 1.08 seconds to deallocate network for instance. 
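The "Waiting for function ... _deallocate_network_with_retries to return" record a few lines above comes from an oslo.service looping call that retries the network teardown until it succeeds, which is why the manager can later report "Took 1.08 seconds to deallocate network for instance". The snippet below is only a schematic plain-Python version of that retry-and-time pattern; the attempt count and delay are illustrative assumptions, not values taken from the Nova code.

import time

# Schematic retry wrapper; Nova's real implementation goes through
# oslo.service's looping-call machinery.  Attempts/delay are assumptions.
def call_with_retries(func, attempts=3, delay=1.0):
    start = time.monotonic()
    for attempt in range(1, attempts + 1):
        try:
            result = func()
            break
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay)
    print("Took %.2f seconds" % (time.monotonic() - start))
    return result


def deallocate_network():
    # Placeholder for the Neutron calls that tear down the instance's ports.
    return []

call_with_retries(deallocate_network)  # prints e.g. "Took 0.00 seconds"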
[ 615.576538] env[61972]: DEBUG nova.compute.claims [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 615.576737] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.933865] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Releasing lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.934313] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 615.934647] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 615.935286] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca06da13-7a5a-4923-9f39-c66537850138 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.944186] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd19716-b786-409e-88c9-242f75567fdf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.960757] env[61972]: DEBUG nova.compute.manager [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Received event network-changed-61075218-709a-43a5-a0f4-4f4c50ec0251 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 615.960981] env[61972]: DEBUG nova.compute.manager [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Refreshing instance network info cache due to event network-changed-61075218-709a-43a5-a0f4-4f4c50ec0251. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 615.961201] env[61972]: DEBUG oslo_concurrency.lockutils [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] Acquiring lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.961307] env[61972]: DEBUG oslo_concurrency.lockutils [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] Acquired lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.961459] env[61972]: DEBUG nova.network.neutron [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Refreshing network info cache for port 61075218-709a-43a5-a0f4-4f4c50ec0251 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 615.968605] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cf7ea49c-91ff-4c81-803c-90608c2849dc could not be found. [ 615.968733] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.968896] env[61972]: INFO nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Took 0.03 seconds to destroy the instance on the hypervisor. [ 615.969159] env[61972]: DEBUG oslo.service.loopingcall [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 615.969970] env[61972]: DEBUG nova.compute.manager [-] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 615.970513] env[61972]: DEBUG nova.network.neutron [-] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.974142] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 615.993720] env[61972]: DEBUG nova.scheduler.client.report [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 615.998168] env[61972]: DEBUG nova.network.neutron [-] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.003705] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 616.004114] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 616.004114] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.004306] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 616.004448] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.004590] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 
tempest-AttachInterfacesV270Test-394902659-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 616.004789] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 616.004943] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 616.005119] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 616.005386] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 616.005556] env[61972]: DEBUG nova.virt.hardware [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 616.006805] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d312d4a-caa9-4d04-842f-c8c82658227d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.016180] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7eb0726-40c0-41c9-bf3e-8766f348fd92 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.190381] env[61972]: ERROR nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. 
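The nova.virt.hardware records above walk through CPU topology selection for the m1.nano flavor: with vcpus=1 and no flavor or image limits set, the effective maxima default to 65536 per dimension and the only factorisation is 1 socket x 1 core x 1 thread. The helper below is not Nova's algorithm, just a simplified enumeration that reproduces that result.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

# Simplified enumeration of candidate CPU topologies, in the spirit of the
# hardware.py debug output above: list every sockets*cores*threads
# factorisation of the vCPU count that respects the per-dimension maxima.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // sockets // cores
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# The m1.nano flavor in the log has vcpus=1, so only 1:1:1 is possible.
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]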
[ 616.190381] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 616.190381] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 616.190381] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 616.190381] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.190381] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 616.190381] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.190381] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 616.190381] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.190381] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 616.190381] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.190381] env[61972]: ERROR nova.compute.manager raise self.value [ 616.190381] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.190381] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 616.190381] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.190381] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 616.190861] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.190861] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 616.190861] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. 
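The traceback above passes through oslo_utils.excutils.save_and_reraise_exception(), which is why the original PortBindingFailed resurfaces via __exit__ -> force_reraise() -> raise self.value. Below is a simplified, self-contained approximation of that context manager (not the oslo.utils source): entered from inside an except block, it remembers the in-flight exception, lets cleanup run, and re-raises on exit.

import sys

# Simplified approximation of oslo_utils.excutils.save_and_reraise_exception
# (the real one has more options, e.g. suppressing the reraise).
class save_and_reraise_exception:
    def __enter__(self):
        self.value = sys.exc_info()[1]  # exception currently being handled
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None and self.value is not None:
            self.force_reraise()
        return False  # never swallow exceptions raised by the with-body

    def force_reraise(self):
        raise self.value


def update_port(port_id):
    raise RuntimeError("Binding failed for port %s" % port_id)

try:
    try:
        update_port('d59aa4aa-ae41-40dd-a250-43f0ba448dc2')
    except Exception:
        with save_and_reraise_exception():
            print("cleaning up partially created ports ...")
except RuntimeError as exc:
    print("re-raised:", exc)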
[ 616.190861] env[61972]: ERROR nova.compute.manager [ 616.190861] env[61972]: Traceback (most recent call last): [ 616.190861] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 616.190861] env[61972]: listener.cb(fileno) [ 616.190861] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.190861] env[61972]: result = function(*args, **kwargs) [ 616.190861] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 616.190861] env[61972]: return func(*args, **kwargs) [ 616.190861] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 616.190861] env[61972]: raise e [ 616.190861] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 616.190861] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 616.190861] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.190861] env[61972]: created_port_ids = self._update_ports_for_instance( [ 616.190861] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.190861] env[61972]: with excutils.save_and_reraise_exception(): [ 616.190861] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.190861] env[61972]: self.force_reraise() [ 616.190861] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.190861] env[61972]: raise self.value [ 616.190861] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.190861] env[61972]: updated_port = self._update_port( [ 616.190861] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.190861] env[61972]: _ensure_no_port_binding_failure(port) [ 616.190861] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.190861] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 616.191777] env[61972]: nova.exception.PortBindingFailed: Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. [ 616.191777] env[61972]: Removing descriptor: 21 [ 616.192013] env[61972]: ERROR nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. 
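Note where the spawn actually fails: _allocate_network_async runs port creation in a background greenthread and hands back a deferred network_info wrapper, so the PortBindingFailed only surfaces once get_vif_info() iterates it — the per-instance traceback that follows shows exactly that path through model.py's __iter__/_sync_wrapper/wait(). The sketch below models the idea with concurrent.futures instead of eventlet; the class and function names are illustrative, not Nova's.

from concurrent.futures import ThreadPoolExecutor

# Illustrative model: network allocation runs in the background, and the
# network_info object only resolves (and re-raises any failure) when it is
# first iterated by the driver.
class AsyncNetworkInfo:
    def __init__(self, future):
        self._future = future

    def __iter__(self):
        # Equivalent of _sync_wrapper()/wait(): block on the background
        # allocation and re-raise its exception here.
        return iter(self._future.result())


def allocate_for_instance():
    raise RuntimeError(
        "Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2")


with ThreadPoolExecutor(max_workers=1) as pool:
    network_info = AsyncNetworkInfo(pool.submit(allocate_for_instance))
    try:
        for vif in network_info:   # first access -> failure surfaces here
            print(vif)
    except RuntimeError as exc:
        print("spawn fails here:", exc)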
[ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Traceback (most recent call last): [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] yield resources [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self.driver.spawn(context, instance, image_meta, [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] vm_ref = self.build_virtual_machine(instance, [ 616.192013] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] vif_infos = vmwarevif.get_vif_info(self._session, [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] for vif in network_info: [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] return self._sync_wrapper(fn, *args, **kwargs) [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self.wait() [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self[:] = self._gt.wait() [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] return self._exit_event.wait() [ 616.192300] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 616.192300] env[61972]: ERROR 
nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] result = hub.switch() [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] return self.greenlet.switch() [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] result = function(*args, **kwargs) [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] return func(*args, **kwargs) [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] raise e [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] nwinfo = self.network_api.allocate_for_instance( [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] created_port_ids = self._update_ports_for_instance( [ 616.192725] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] with excutils.save_and_reraise_exception(): [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self.force_reraise() [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] raise self.value [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] updated_port = self._update_port( [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.193046] 
env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] _ensure_no_port_binding_failure(port) [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] raise exception.PortBindingFailed(port_id=port['id']) [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] nova.exception.PortBindingFailed: Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. [ 616.193046] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] [ 616.193384] env[61972]: INFO nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Terminating instance [ 616.498854] env[61972]: DEBUG nova.network.neutron [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.502452] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.502452] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 616.504890] env[61972]: DEBUG nova.network.neutron [-] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.506161] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.422s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.509189] env[61972]: INFO nova.compute.claims [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.600894] env[61972]: DEBUG nova.network.neutron [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.698092] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Acquiring lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.698982] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Acquired lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.698982] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.007308] env[61972]: DEBUG nova.compute.utils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 617.008710] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 617.008920] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 617.013130] env[61972]: INFO nova.compute.manager [-] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Took 1.04 seconds to deallocate network for instance. [ 617.015343] env[61972]: DEBUG nova.compute.claims [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 617.015515] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.048177] env[61972]: DEBUG nova.policy [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '423b480cd7264efaaa2b1e35e3a70f1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afeb65a860024d6f8e8371d599517f10', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 617.104614] env[61972]: DEBUG oslo_concurrency.lockutils [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] Releasing lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.104890] env[61972]: DEBUG nova.compute.manager [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Received event network-vif-deleted-61075218-709a-43a5-a0f4-4f4c50ec0251 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 617.105100] env[61972]: DEBUG nova.compute.manager [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Received event network-changed-d59aa4aa-ae41-40dd-a250-43f0ba448dc2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 617.105324] env[61972]: DEBUG nova.compute.manager [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Refreshing instance network info cache due to event network-changed-d59aa4aa-ae41-40dd-a250-43f0ba448dc2. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 617.105436] env[61972]: DEBUG oslo_concurrency.lockutils [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] Acquiring lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.247291] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.444624] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.507017] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Successfully created port: f086ddb3-723b-468a-a58f-fa7730f53cae {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.513571] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 617.947537] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Releasing lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.948022] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 617.948235] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 617.948563] env[61972]: DEBUG oslo_concurrency.lockutils [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] Acquired lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.948756] env[61972]: DEBUG nova.network.neutron [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Refreshing network info cache for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 617.949950] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a74b2f7e-81aa-493e-bbb2-df192ce6aca7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.967495] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc45cc1-2e8d-49a4-8c4d-0f6f65b77e53 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.995223] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 81775c2c-328e-4e33-8ff7-40a9f638ec76 could not be found. [ 617.995521] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 617.995803] env[61972]: INFO nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Took 0.05 seconds to destroy the instance on the hypervisor. [ 617.996026] env[61972]: DEBUG oslo.service.loopingcall [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 617.999301] env[61972]: DEBUG nova.compute.manager [-] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 617.999460] env[61972]: DEBUG nova.network.neutron [-] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 618.029213] env[61972]: DEBUG nova.network.neutron [-] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.046507] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e843442-0a84-4606-8220-215dd3f9f835 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.056564] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801a83ad-cb6d-4554-bb64-5179c7bdb5c3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.089170] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25e206d-542d-46a8-85b7-dbe3fe967b2d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.093384] env[61972]: DEBUG nova.compute.manager [req-59bd2b86-ad61-4e78-b000-1bba28338cf5 req-745d58d7-f382-493d-bd3e-f56143cbfbf3 service nova] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Received event network-vif-deleted-d59aa4aa-ae41-40dd-a250-43f0ba448dc2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 618.099026] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efea639-d4aa-493a-af67-a4201c87fd4a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.115689] env[61972]: DEBUG nova.compute.provider_tree [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.485551] env[61972]: DEBUG nova.network.neutron [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.532724] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 618.534906] env[61972]: DEBUG nova.network.neutron [-] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.570950] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.571216] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.571372] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.571550] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.572342] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.572538] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.572755] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.573807] env[61972]: DEBUG nova.virt.hardware [None 
req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.573807] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.573917] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.574079] env[61972]: DEBUG nova.virt.hardware [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.574909] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e3443b-9d59-47e1-bd22-49775d872ecb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.584254] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad06e088-5643-4fe8-92c3-ebece9710be4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.622124] env[61972]: DEBUG nova.scheduler.client.report [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 618.637185] env[61972]: DEBUG nova.network.neutron [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.042025] env[61972]: INFO nova.compute.manager [-] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Took 1.04 seconds to deallocate network for instance. 
[ 619.045634] env[61972]: DEBUG nova.compute.claims [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 619.045902] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.051613] env[61972]: ERROR nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. [ 619.051613] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 619.051613] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 619.051613] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 619.051613] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.051613] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 619.051613] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.051613] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 619.051613] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.051613] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 619.051613] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.051613] env[61972]: ERROR nova.compute.manager raise self.value [ 619.051613] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.051613] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 619.051613] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.051613] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 619.052079] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.052079] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 619.052079] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. 
[ 619.052079] env[61972]: ERROR nova.compute.manager [ 619.052079] env[61972]: Traceback (most recent call last): [ 619.052079] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 619.052079] env[61972]: listener.cb(fileno) [ 619.052079] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.052079] env[61972]: result = function(*args, **kwargs) [ 619.052079] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 619.052079] env[61972]: return func(*args, **kwargs) [ 619.052079] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 619.052079] env[61972]: raise e [ 619.052079] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 619.052079] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 619.052079] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.052079] env[61972]: created_port_ids = self._update_ports_for_instance( [ 619.052079] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.052079] env[61972]: with excutils.save_and_reraise_exception(): [ 619.052079] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.052079] env[61972]: self.force_reraise() [ 619.052079] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.052079] env[61972]: raise self.value [ 619.052079] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.052079] env[61972]: updated_port = self._update_port( [ 619.052079] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.052079] env[61972]: _ensure_no_port_binding_failure(port) [ 619.052079] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.052079] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 619.052811] env[61972]: nova.exception.PortBindingFailed: Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. [ 619.052811] env[61972]: Removing descriptor: 21 [ 619.052811] env[61972]: ERROR nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. 
[ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Traceback (most recent call last): [ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] yield resources [ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self.driver.spawn(context, instance, image_meta, [ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 619.052811] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] vm_ref = self.build_virtual_machine(instance, [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] vif_infos = vmwarevif.get_vif_info(self._session, [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] for vif in network_info: [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] return self._sync_wrapper(fn, *args, **kwargs) [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self.wait() [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self[:] = self._gt.wait() [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] return self._exit_event.wait() [ 619.053097] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 619.053421] env[61972]: ERROR 
nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] result = hub.switch() [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] return self.greenlet.switch() [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] result = function(*args, **kwargs) [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] return func(*args, **kwargs) [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] raise e [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] nwinfo = self.network_api.allocate_for_instance( [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.053421] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] created_port_ids = self._update_ports_for_instance( [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] with excutils.save_and_reraise_exception(): [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self.force_reraise() [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] raise self.value [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] updated_port = self._update_port( [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.053722] 
env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] _ensure_no_port_binding_failure(port) [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.053722] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] raise exception.PortBindingFailed(port_id=port['id']) [ 619.053987] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] nova.exception.PortBindingFailed: Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. [ 619.053987] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] [ 619.053987] env[61972]: INFO nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Terminating instance [ 619.125314] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.125857] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 619.128518] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.175s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.130351] env[61972]: INFO nova.compute.claims [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.141036] env[61972]: DEBUG oslo_concurrency.lockutils [req-43600098-57c4-4c82-83f6-06fee978f27f req-567ca3dd-2b66-4632-b515-3b10a6437f00 service nova] Releasing lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.558780] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Acquiring lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.559061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Acquired lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.559189] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.635457] env[61972]: DEBUG nova.compute.utils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.638868] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 619.639079] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 619.704226] env[61972]: DEBUG nova.policy [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11e1217449554a459d4f5cb72bfc578f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b80714a72c14aef842c7b02001edd92', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 619.983539] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.983796] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.032449] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Successfully created port: 12773b1b-c8da-4e56-8919-b5a36759edc7 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.084218] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.139998] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 620.156173] env[61972]: DEBUG nova.compute.manager [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Received event network-changed-f086ddb3-723b-468a-a58f-fa7730f53cae {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 620.156363] env[61972]: DEBUG nova.compute.manager [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Refreshing instance network info cache due to event network-changed-f086ddb3-723b-468a-a58f-fa7730f53cae. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 620.156543] env[61972]: DEBUG oslo_concurrency.lockutils [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] Acquiring lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.266716] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.492644] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.492865] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 620.492995] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Rebuilding the list of instances to heal {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 620.596558] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26438d14-8fd5-44d5-9cc3-e487ba5f1855 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.604451] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becf5b6b-c25c-441b-8429-764c840d49e7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.636629] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf46b68-9c9b-4253-8ea8-b8aa7c6adbd5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.645960] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c12852-3a63-44da-a7ac-c576bf89ccea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.666137] env[61972]: DEBUG nova.compute.provider_tree [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b 
tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.773940] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Releasing lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.774154] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 620.774355] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 620.774738] env[61972]: DEBUG oslo_concurrency.lockutils [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] Acquired lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.774930] env[61972]: DEBUG nova.network.neutron [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Refreshing network info cache for port f086ddb3-723b-468a-a58f-fa7730f53cae {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 620.776281] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f93015f1-1586-4ce2-b327-0e00ab32c077 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.785507] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4d442c-571b-4732-a623-8eb4aa5ecff1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.813556] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aad5f67a-ad8f-4d0d-977c-1e65ada7682a could not be found. 
[ 620.813556] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.813556] env[61972]: INFO nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 620.813556] env[61972]: DEBUG oslo.service.loopingcall [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.813556] env[61972]: DEBUG nova.compute.manager [-] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 620.813556] env[61972]: DEBUG nova.network.neutron [-] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.833868] env[61972]: DEBUG nova.network.neutron [-] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.999863] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 621.000060] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 621.000192] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 621.000315] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 621.000432] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Skipping network cache update for instance because it is Building. 
{{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 621.000550] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 621.000668] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 621.000783] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 621.000902] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Didn't find any instances for network info cache update. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 621.001125] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.001278] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.004331] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.004331] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.004331] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.004331] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.004331] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 621.004331] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.157268] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 621.169409] env[61972]: DEBUG nova.scheduler.client.report [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 621.196269] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 621.196419] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 621.196463] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.196609] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 621.196756] 
env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.196898] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 621.197556] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 621.197782] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 621.197965] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 621.198141] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 621.198312] env[61972]: DEBUG nova.virt.hardware [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 621.200120] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7b71b7-8916-4697-8938-99e1422ca226 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.211795] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c813a5-6c9f-4152-81a7-132332f867e9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.305412] env[61972]: DEBUG nova.network.neutron [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.337079] env[61972]: DEBUG nova.network.neutron [-] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.441476] env[61972]: DEBUG nova.network.neutron [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.489159] env[61972]: ERROR nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. [ 621.489159] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 621.489159] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 621.489159] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 621.489159] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 621.489159] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 621.489159] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 621.489159] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 621.489159] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.489159] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 621.489159] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.489159] env[61972]: ERROR nova.compute.manager raise self.value [ 621.489159] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 621.489159] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 621.489159] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.489159] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 621.489878] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.489878] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 621.489878] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. 
[ 621.489878] env[61972]: ERROR nova.compute.manager [ 621.489878] env[61972]: Traceback (most recent call last): [ 621.489878] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 621.489878] env[61972]: listener.cb(fileno) [ 621.489878] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.489878] env[61972]: result = function(*args, **kwargs) [ 621.489878] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 621.489878] env[61972]: return func(*args, **kwargs) [ 621.489878] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 621.489878] env[61972]: raise e [ 621.489878] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 621.489878] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 621.489878] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 621.489878] env[61972]: created_port_ids = self._update_ports_for_instance( [ 621.489878] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 621.489878] env[61972]: with excutils.save_and_reraise_exception(): [ 621.489878] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.489878] env[61972]: self.force_reraise() [ 621.489878] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.489878] env[61972]: raise self.value [ 621.489878] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 621.489878] env[61972]: updated_port = self._update_port( [ 621.489878] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.489878] env[61972]: _ensure_no_port_binding_failure(port) [ 621.489878] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.489878] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 621.490911] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. [ 621.490911] env[61972]: Removing descriptor: 21 [ 621.490911] env[61972]: ERROR nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. 
[ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] Traceback (most recent call last): [ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] yield resources [ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self.driver.spawn(context, instance, image_meta, [ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.490911] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] vm_ref = self.build_virtual_machine(instance, [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] for vif in network_info: [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] return self._sync_wrapper(fn, *args, **kwargs) [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self.wait() [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self[:] = self._gt.wait() [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] return self._exit_event.wait() [ 621.491481] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 621.492216] env[61972]: ERROR 
nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] result = hub.switch() [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] return self.greenlet.switch() [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] result = function(*args, **kwargs) [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] return func(*args, **kwargs) [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] raise e [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] nwinfo = self.network_api.allocate_for_instance( [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 621.492216] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] created_port_ids = self._update_ports_for_instance( [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] with excutils.save_and_reraise_exception(): [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self.force_reraise() [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] raise self.value [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] updated_port = self._update_port( [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.492775] 
env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] _ensure_no_port_binding_failure(port) [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.492775] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] raise exception.PortBindingFailed(port_id=port['id']) [ 621.493363] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] nova.exception.PortBindingFailed: Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. [ 621.493363] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] [ 621.493363] env[61972]: INFO nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Terminating instance [ 621.505287] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.675625] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.677045] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 621.678808] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.029s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.839555] env[61972]: INFO nova.compute.manager [-] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Took 1.03 seconds to deallocate network for instance. 
[ 621.844499] env[61972]: DEBUG nova.compute.claims [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 621.844499] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.944795] env[61972]: DEBUG oslo_concurrency.lockutils [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] Releasing lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.945107] env[61972]: DEBUG nova.compute.manager [req-1e1496f8-f687-41ff-9c7a-3adbd3ae6b83 req-3afd6741-d39e-4023-a22d-76957f38e2b1 service nova] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Received event network-vif-deleted-f086ddb3-723b-468a-a58f-fa7730f53cae {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 621.994830] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.994830] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquired lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.994972] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 622.183265] env[61972]: DEBUG nova.compute.utils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 622.188296] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 622.188468] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 622.207860] env[61972]: DEBUG nova.compute.manager [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Received event network-changed-12773b1b-c8da-4e56-8919-b5a36759edc7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 622.207945] env[61972]: DEBUG nova.compute.manager [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Refreshing instance network info cache due to event network-changed-12773b1b-c8da-4e56-8919-b5a36759edc7. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 622.208116] env[61972]: DEBUG oslo_concurrency.lockutils [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] Acquiring lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.240218] env[61972]: DEBUG nova.policy [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bdbb74770364b4a8700aa7aa967b584', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b445e0d9bbfa4a528bc49fff621c484c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 622.525690] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.578792] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698e33f3-5610-4c86-8961-8030d5ef0951 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.587682] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6dc839-e725-4625-9e43-00e72db5537f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.617013] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf86eda-8431-47c0-a69d-6e71d6b51580 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.624539] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad59597-1a6e-4638-83cb-4f6f708c7a25 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.637581] env[61972]: DEBUG nova.compute.provider_tree [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.688770] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 622.733695] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.897531] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Successfully created port: d655116c-27da-4401-8efc-03026926cbe0 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.142085] env[61972]: DEBUG nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 623.234198] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Successfully created port: e04ddf1b-0dec-41c8-a815-db5b30177585 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.238976] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Releasing lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.239472] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 623.239633] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 623.241290] env[61972]: DEBUG oslo_concurrency.lockutils [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] Acquired lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.241290] env[61972]: DEBUG nova.network.neutron [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Refreshing network info cache for port 12773b1b-c8da-4e56-8919-b5a36759edc7 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 623.242087] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd60dd08-fd81-413a-b4cf-bc64c251f670 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.252101] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a773bb9-2515-4701-8219-985a3b9bda02 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.282086] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98905e39-fda3-47a8-867d-130d76894e53 could not be found. [ 623.282344] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 623.282522] env[61972]: INFO nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Took 0.04 seconds to destroy the instance on the hypervisor. [ 623.282771] env[61972]: DEBUG oslo.service.loopingcall [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 623.283837] env[61972]: DEBUG nova.compute.manager [-] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 623.283837] env[61972]: DEBUG nova.network.neutron [-] [instance: 98905e39-fda3-47a8-867d-130d76894e53] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.317291] env[61972]: DEBUG nova.network.neutron [-] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.611848] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Successfully created port: 3d50f407-32cf-4ef2-a577-cbf0393f7006 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.651042] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.972s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.651706] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. 
[ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Traceback (most recent call last): [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self.driver.spawn(context, instance, image_meta, [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] vm_ref = self.build_virtual_machine(instance, [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] vif_infos = vmwarevif.get_vif_info(self._session, [ 623.651706] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] for vif in network_info: [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] return self._sync_wrapper(fn, *args, **kwargs) [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self.wait() [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self[:] = self._gt.wait() [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] return self._exit_event.wait() [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] result = hub.switch() [ 623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
623.652234] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] return self.greenlet.switch() [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] result = function(*args, **kwargs) [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] return func(*args, **kwargs) [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] raise e [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] nwinfo = self.network_api.allocate_for_instance( [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] created_port_ids = self._update_ports_for_instance( [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] with excutils.save_and_reraise_exception(): [ 623.652534] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] self.force_reraise() [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] raise self.value [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] updated_port = self._update_port( [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] _ensure_no_port_binding_failure(port) [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] raise exception.PortBindingFailed(port_id=port['id']) [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] nova.exception.PortBindingFailed: Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. [ 623.652991] env[61972]: ERROR nova.compute.manager [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] [ 623.653275] env[61972]: DEBUG nova.compute.utils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 623.653841] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.993s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.655508] env[61972]: INFO nova.compute.claims [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.664480] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Build of instance dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd was re-scheduled: Binding failed for port 10839b72-0e8a-4422-926c-68083c84bf75, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 623.665238] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 623.667621] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.667621] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquired lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.667621] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.700072] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 623.725841] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.726421] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.726795] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.727598] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 623.727598] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.727598] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.727870] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.729211] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.729211] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b 
tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.729211] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.729211] env[61972]: DEBUG nova.virt.hardware [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.730323] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c162151-e7b1-438c-bc5d-b7ef91493cdf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.743413] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe2d79d-fdce-425b-816d-57398149aaec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.765121] env[61972]: DEBUG nova.network.neutron [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.819923] env[61972]: DEBUG nova.network.neutron [-] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.962990] env[61972]: DEBUG nova.network.neutron [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.192649] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.323639] env[61972]: INFO nova.compute.manager [-] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Took 1.04 seconds to deallocate network for instance. 
[ 624.326401] env[61972]: DEBUG nova.compute.claims [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 624.328099] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.355942] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.466614] env[61972]: DEBUG oslo_concurrency.lockutils [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] Releasing lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.466996] env[61972]: DEBUG nova.compute.manager [req-d29ec9d2-cc65-4a23-a3d7-f42aa224f04e req-417d4239-11a1-4fef-ab66-cf57d7899eba service nova] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Received event network-vif-deleted-12773b1b-c8da-4e56-8919-b5a36759edc7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 624.854638] env[61972]: DEBUG nova.compute.manager [req-8e1bfd29-da8b-483f-bb6e-c9e4ebb6c16f req-77c271a6-65c7-4dcc-ac30-6e51b5e79004 service nova] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Received event network-changed-d655116c-27da-4401-8efc-03026926cbe0 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 624.854911] env[61972]: DEBUG nova.compute.manager [req-8e1bfd29-da8b-483f-bb6e-c9e4ebb6c16f req-77c271a6-65c7-4dcc-ac30-6e51b5e79004 service nova] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Refreshing instance network info cache due to event network-changed-d655116c-27da-4401-8efc-03026926cbe0. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 624.855128] env[61972]: DEBUG oslo_concurrency.lockutils [req-8e1bfd29-da8b-483f-bb6e-c9e4ebb6c16f req-77c271a6-65c7-4dcc-ac30-6e51b5e79004 service nova] Acquiring lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.855197] env[61972]: DEBUG oslo_concurrency.lockutils [req-8e1bfd29-da8b-483f-bb6e-c9e4ebb6c16f req-77c271a6-65c7-4dcc-ac30-6e51b5e79004 service nova] Acquired lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.855410] env[61972]: DEBUG nova.network.neutron [req-8e1bfd29-da8b-483f-bb6e-c9e4ebb6c16f req-77c271a6-65c7-4dcc-ac30-6e51b5e79004 service nova] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Refreshing network info cache for port d655116c-27da-4401-8efc-03026926cbe0 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 624.859596] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Releasing lock "refresh_cache-dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.859675] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 624.859822] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 624.859987] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.894172] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.028883] env[61972]: ERROR nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. 
[ 625.028883] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 625.028883] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 625.028883] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 625.028883] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.028883] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 625.028883] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.028883] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 625.028883] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.028883] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 625.028883] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.028883] env[61972]: ERROR nova.compute.manager raise self.value [ 625.028883] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.028883] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 625.028883] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.028883] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 625.030646] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.030646] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 625.030646] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. 
[ 625.030646] env[61972]: ERROR nova.compute.manager [ 625.030646] env[61972]: Traceback (most recent call last): [ 625.030646] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 625.030646] env[61972]: listener.cb(fileno) [ 625.030646] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.030646] env[61972]: result = function(*args, **kwargs) [ 625.030646] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 625.030646] env[61972]: return func(*args, **kwargs) [ 625.030646] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 625.030646] env[61972]: raise e [ 625.030646] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 625.030646] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 625.030646] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.030646] env[61972]: created_port_ids = self._update_ports_for_instance( [ 625.030646] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.030646] env[61972]: with excutils.save_and_reraise_exception(): [ 625.030646] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.030646] env[61972]: self.force_reraise() [ 625.030646] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.030646] env[61972]: raise self.value [ 625.030646] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.030646] env[61972]: updated_port = self._update_port( [ 625.030646] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.030646] env[61972]: _ensure_no_port_binding_failure(port) [ 625.030646] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.030646] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 625.031496] env[61972]: nova.exception.PortBindingFailed: Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. [ 625.031496] env[61972]: Removing descriptor: 21 [ 625.031496] env[61972]: ERROR nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. 
[ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Traceback (most recent call last): [ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] yield resources [ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self.driver.spawn(context, instance, image_meta, [ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.031496] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] vm_ref = self.build_virtual_machine(instance, [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] for vif in network_info: [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] return self._sync_wrapper(fn, *args, **kwargs) [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self.wait() [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self[:] = self._gt.wait() [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] return self._exit_event.wait() [ 625.031813] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 625.032174] env[61972]: ERROR 
nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] result = hub.switch() [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] return self.greenlet.switch() [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] result = function(*args, **kwargs) [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] return func(*args, **kwargs) [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] raise e [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] nwinfo = self.network_api.allocate_for_instance( [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.032174] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] created_port_ids = self._update_ports_for_instance( [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] with excutils.save_and_reraise_exception(): [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self.force_reraise() [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] raise self.value [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] updated_port = self._update_port( [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.032478] 
env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] _ensure_no_port_binding_failure(port) [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.032478] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] raise exception.PortBindingFailed(port_id=port['id']) [ 625.032807] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] nova.exception.PortBindingFailed: Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. [ 625.032807] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] [ 625.032807] env[61972]: INFO nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Terminating instance [ 625.136177] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07797f2d-6dc3-453d-982b-253985cdce88 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.144126] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bab2725-890d-4796-959c-4f0b2d53cb50 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.174458] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cc78bb-c7b2-4b82-bf57-39b246553c18 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.182121] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a35f7f7-8906-4f9d-b7ba-38e23ac3d634 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.195920] env[61972]: DEBUG nova.compute.provider_tree [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.398428] env[61972]: DEBUG nova.network.neutron [req-8e1bfd29-da8b-483f-bb6e-c9e4ebb6c16f req-77c271a6-65c7-4dcc-ac30-6e51b5e79004 service nova] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.400663] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.480694] env[61972]: DEBUG nova.network.neutron [req-8e1bfd29-da8b-483f-bb6e-c9e4ebb6c16f req-77c271a6-65c7-4dcc-ac30-6e51b5e79004 service nova] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.541211] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.700344] env[61972]: DEBUG nova.scheduler.client.report [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 625.903178] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd] Took 1.04 seconds to deallocate network for instance. 
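Annotation: the PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which raises exception.PortBindingFailed(port_id=port['id']). A minimal, self-contained sketch of that check follows; the log only shows the function name and the raised exception, so the 'binding_failed' sentinel in binding:vif_type is an assumption about how Neutron reports an unbindable port, and the example port dict is hypothetical.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def _ensure_no_port_binding_failure(port):
    # Assumed sentinel: Neutron marks a port it could not bind by setting
    # binding:vif_type to 'binding_failed' on the port it returns.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Usage with a hypothetical port payload shaped like a Neutron port dict:
_ensure_no_port_binding_failure(
    {'id': 'd655116c-27da-4401-8efc-03026926cbe0',
     'binding:vif_type': 'binding_failed'})  # raises PortBindingFailed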
[ 625.984861] env[61972]: DEBUG oslo_concurrency.lockutils [req-8e1bfd29-da8b-483f-bb6e-c9e4ebb6c16f req-77c271a6-65c7-4dcc-ac30-6e51b5e79004 service nova] Releasing lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.985380] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquired lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.985707] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.205502] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.206221] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 626.208773] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.655s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.518148] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.591479] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.713925] env[61972]: DEBUG nova.compute.utils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 626.714791] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 626.714964] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 626.782172] env[61972]: DEBUG nova.policy [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06452b75b6164422a6d6916ee7e1126d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c854c4989c58433688b50640d15ad6e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 626.900488] env[61972]: DEBUG nova.compute.manager [req-a21d0c77-bd4d-4832-a052-b1427ffe002d req-4539cc4c-d43e-45e9-a981-9075b61a9918 service nova] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Received event network-vif-deleted-d655116c-27da-4401-8efc-03026926cbe0 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 626.938132] env[61972]: INFO nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Deleted allocations for instance dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd [ 627.093722] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Releasing lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.094574] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Start destroying the instance 
on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 627.094574] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 627.097170] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-68e2089b-bdf8-41fa-a860-d3d1b6f95059 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.108017] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c171b6f4-1971-41a0-bdf3-cd3f4a8edfb8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.130656] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a48c6a10-0c00-40f8-831f-713213390dfa could not be found. [ 627.130901] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 627.131099] env[61972]: INFO nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 627.131355] env[61972]: DEBUG oslo.service.loopingcall [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 627.134105] env[61972]: DEBUG nova.compute.manager [-] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 627.134238] env[61972]: DEBUG nova.network.neutron [-] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.178659] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1a185f-eb3d-4a20-a615-4d2b6dc90964 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.186426] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d59e538-127f-4e1b-b6e8-449d9dec0d9a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.218959] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f78592e-a199-45e6-bc60-d1d5cdf95634 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.223016] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 627.234295] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110627b4-860a-4f45-b7d0-fae3eff55640 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.246949] env[61972]: DEBUG nova.compute.provider_tree [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.307230] env[61972]: DEBUG nova.network.neutron [-] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.441135] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Successfully created port: 9f77ad1b-ec8f-4cc9-930e-3b06113ed995 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.456603] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "dee7bce8-b241-4cb9-8cbd-fd4e160ab9bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 75.092s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.751457] env[61972]: DEBUG nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 627.959286] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 628.237058] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 628.261026] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.052s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.261153] env[61972]: ERROR nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information.
[ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Traceback (most recent call last): [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self.driver.spawn(context, instance, image_meta, [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] vm_ref = self.build_virtual_machine(instance, [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.261153] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] for vif in network_info: [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] return self._sync_wrapper(fn, *args, **kwargs) [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self.wait() [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self[:] = self._gt.wait() [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] return self._exit_event.wait() [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] result = hub.switch() [ 628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
628.261460] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] return self.greenlet.switch() [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] result = function(*args, **kwargs) [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] return func(*args, **kwargs) [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] raise e [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] nwinfo = self.network_api.allocate_for_instance( [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] created_port_ids = self._update_ports_for_instance( [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] with excutils.save_and_reraise_exception(): [ 628.261830] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] self.force_reraise() [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] raise self.value [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] updated_port = self._update_port( [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] _ensure_no_port_binding_failure(port) [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] raise exception.PortBindingFailed(port_id=port['id']) [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] nova.exception.PortBindingFailed: Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information. [ 628.262243] env[61972]: ERROR nova.compute.manager [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] [ 628.264560] env[61972]: DEBUG nova.compute.utils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 628.265421] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.459s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.267071] env[61972]: INFO nova.compute.claims [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.270313] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Build of instance 56b1ea80-3109-4212-959b-0e5fb2fc66d3 was re-scheduled: Binding failed for port 2802b852-ca47-4063-87c9-cfe535e0cc35, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 628.270983] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 628.271369] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquiring lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.271708] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Acquired lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.272085] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 628.277027] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 628.277027] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 628.277027] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.277374] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Flavor pref 0:0:0 {{(pid=61972) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 628.277660] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.277912] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 628.278780] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 628.278780] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 628.278780] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 628.278780] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 628.279439] env[61972]: DEBUG nova.virt.hardware [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 628.280394] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e1c93e-57ef-47c0-809a-d8ce7398e71a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.295025] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f8abb8-4217-4a71-9d58-266a69dda887 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.311406] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.481414] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.488942] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.669378] env[61972]: DEBUG nova.network.neutron [-] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.966908] env[61972]: DEBUG nova.compute.manager [req-ce22174b-e04f-41f1-913b-dc22ec90329a req-3c571362-4974-4bd7-b81d-08a4c8ec21ab service nova] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Received event network-changed-9f77ad1b-ec8f-4cc9-930e-3b06113ed995 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 628.966908] env[61972]: DEBUG nova.compute.manager [req-ce22174b-e04f-41f1-913b-dc22ec90329a req-3c571362-4974-4bd7-b81d-08a4c8ec21ab service nova] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Refreshing instance network info cache due to event network-changed-9f77ad1b-ec8f-4cc9-930e-3b06113ed995. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 628.966908] env[61972]: DEBUG oslo_concurrency.lockutils [req-ce22174b-e04f-41f1-913b-dc22ec90329a req-3c571362-4974-4bd7-b81d-08a4c8ec21ab service nova] Acquiring lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.966908] env[61972]: DEBUG oslo_concurrency.lockutils [req-ce22174b-e04f-41f1-913b-dc22ec90329a req-3c571362-4974-4bd7-b81d-08a4c8ec21ab service nova] Acquired lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.966908] env[61972]: DEBUG nova.network.neutron [req-ce22174b-e04f-41f1-913b-dc22ec90329a req-3c571362-4974-4bd7-b81d-08a4c8ec21ab service nova] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Refreshing network info cache for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 628.991422] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Releasing lock "refresh_cache-56b1ea80-3109-4212-959b-0e5fb2fc66d3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.991614] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 628.991836] env[61972]: DEBUG nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 628.992015] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 629.023273] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.043133] env[61972]: ERROR nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. 
[ 629.043133] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 629.043133] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 629.043133] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 629.043133] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 629.043133] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 629.043133] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 629.043133] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 629.043133] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.043133] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 629.043133] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.043133] env[61972]: ERROR nova.compute.manager raise self.value [ 629.043133] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 629.043133] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 629.043133] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.043133] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 629.043638] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.043638] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 629.043638] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. 
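Annotation: before the PortBindingFailed reaches the caller, the traceback passes through excutils.save_and_reraise_exception() in _update_ports_for_instance and its force_reraise()/"raise self.value" step. A rough self-contained stand-in for that context manager is sketched below, assuming only the behaviour visible in the traceback (capture the exception from the with-block, then re-raise it from __exit__ unless the caller opts out); the real oslo_utils implementation has more options.

class save_and_reraise_exception:
    """Simplified stand-in for oslo_utils.excutils.save_and_reraise_exception."""
    def __init__(self):
        self.reraise = True
        self.value = None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            return False
        self.value = exc_val
        if self.reraise:
            raise self.value   # mirrors force_reraise() / "raise self.value" above
        return True            # swallow only if the caller set reraise = False

# Usage: cleanup can run inside the with-block, yet the original error survives.
try:
    with save_and_reraise_exception():
        raise RuntimeError("port update failed")  # hypothetical failure
except RuntimeError as exc:
    print("re-raised after __exit__:", exc)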
[ 629.043638] env[61972]: ERROR nova.compute.manager [ 629.043638] env[61972]: Traceback (most recent call last): [ 629.043638] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 629.043638] env[61972]: listener.cb(fileno) [ 629.043638] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.043638] env[61972]: result = function(*args, **kwargs) [ 629.043638] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 629.043638] env[61972]: return func(*args, **kwargs) [ 629.043638] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 629.043638] env[61972]: raise e [ 629.043638] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 629.043638] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 629.043638] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 629.043638] env[61972]: created_port_ids = self._update_ports_for_instance( [ 629.043638] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 629.043638] env[61972]: with excutils.save_and_reraise_exception(): [ 629.043638] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.043638] env[61972]: self.force_reraise() [ 629.043638] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.043638] env[61972]: raise self.value [ 629.043638] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 629.043638] env[61972]: updated_port = self._update_port( [ 629.043638] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.043638] env[61972]: _ensure_no_port_binding_failure(port) [ 629.043638] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.043638] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 629.044324] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. [ 629.044324] env[61972]: Removing descriptor: 21 [ 629.044580] env[61972]: ERROR nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. 
[ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Traceback (most recent call last): [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] yield resources [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self.driver.spawn(context, instance, image_meta, [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] vm_ref = self.build_virtual_machine(instance, [ 629.044580] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] vif_infos = vmwarevif.get_vif_info(self._session, [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] for vif in network_info: [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] return self._sync_wrapper(fn, *args, **kwargs) [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self.wait() [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self[:] = self._gt.wait() [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] return self._exit_event.wait() [ 629.045043] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 629.045043] env[61972]: ERROR 
nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] result = hub.switch() [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] return self.greenlet.switch() [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] result = function(*args, **kwargs) [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] return func(*args, **kwargs) [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] raise e [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] nwinfo = self.network_api.allocate_for_instance( [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] created_port_ids = self._update_ports_for_instance( [ 629.045661] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] with excutils.save_and_reraise_exception(): [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self.force_reraise() [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] raise self.value [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] updated_port = self._update_port( [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.046246] 
env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] _ensure_no_port_binding_failure(port) [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] raise exception.PortBindingFailed(port_id=port['id']) [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] nova.exception.PortBindingFailed: Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. [ 629.046246] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] [ 629.046790] env[61972]: INFO nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Terminating instance [ 629.172307] env[61972]: INFO nova.compute.manager [-] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Took 2.04 seconds to deallocate network for instance. [ 629.174901] env[61972]: DEBUG nova.compute.claims [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 629.174901] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.482269] env[61972]: DEBUG nova.network.neutron [req-ce22174b-e04f-41f1-913b-dc22ec90329a req-3c571362-4974-4bd7-b81d-08a4c8ec21ab service nova] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.527490] env[61972]: DEBUG nova.network.neutron [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.552900] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Acquiring lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.611518] env[61972]: DEBUG nova.network.neutron [req-ce22174b-e04f-41f1-913b-dc22ec90329a req-3c571362-4974-4bd7-b81d-08a4c8ec21ab service nova] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.633140] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0a67fd-54b7-4ce2-8a2f-cbb5c3815b01 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.641297] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34b3e87-681f-4a4e-b93f-f95f2e4ce013 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.671672] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e1f7b0-4f57-45b9-b898-68c59d9ff31a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.678749] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c621c510-d5f8-4de3-8e5a-7b499a153b13 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.691659] env[61972]: DEBUG nova.compute.provider_tree [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.032343] env[61972]: INFO nova.compute.manager [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] [instance: 56b1ea80-3109-4212-959b-0e5fb2fc66d3] Took 1.04 seconds to deallocate network for instance. 
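Annotation: in the spawn-side tracebacks above, PortBindingFailed is raised on the greenthread running _allocate_network_async, and it only surfaces when the VMware driver iterates network_info, whose _sync_wrapper() calls wait() on that greenthread. A small sketch of that deferral, assuming eventlet is importable as in the venv paths logged above; the function and error message are placeholders, not Nova code.

import eventlet

def allocate_network():
    # Stand-in for ComputeManager._allocate_network_async, which raises
    # PortBindingFailed inside the spawned greenthread in the log above.
    raise RuntimeError("binding failed for a hypothetical port")

gt = eventlet.spawn(allocate_network)
# ... the instance build carries on; the error stays parked on the greenthread ...
try:
    gt.wait()  # analogue of network_info.wait() -> self._gt.wait() in the traceback
except RuntimeError as exc:
    print("error surfaces only at wait():", exc)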
[ 630.114665] env[61972]: DEBUG oslo_concurrency.lockutils [req-ce22174b-e04f-41f1-913b-dc22ec90329a req-3c571362-4974-4bd7-b81d-08a4c8ec21ab service nova] Releasing lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.115074] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Acquired lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.115260] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.196053] env[61972]: DEBUG nova.scheduler.client.report [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 630.633684] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.701058] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.701581] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 630.704850] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.127s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.750519] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.992575] env[61972]: DEBUG nova.compute.manager [req-c590af2d-342f-4c6b-a37d-ca440f073427 req-9446737e-89ee-4f69-8f94-295cb87f869e service nova] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Received event network-vif-deleted-9f77ad1b-ec8f-4cc9-930e-3b06113ed995 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 631.060762] env[61972]: INFO nova.scheduler.client.report [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Deleted allocations for instance 56b1ea80-3109-4212-959b-0e5fb2fc66d3 [ 631.209381] env[61972]: DEBUG nova.compute.utils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.213549] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 631.213733] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 631.253275] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Releasing lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.253698] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 631.253907] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 631.254754] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f9f937e-e167-48fc-beb7-2ceffdee2065 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.266027] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3456527a-be0b-41ab-9f2a-5ed4c22572c5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.279824] env[61972]: DEBUG nova.policy [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3938d412f0314927ab10800e88e83d4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '860e3f76627645c6afc50616f17e50da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 631.295577] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3f4a6562-4c28-479c-8665-fb61c2d64dae could not be found. [ 631.295801] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 631.295980] env[61972]: INFO nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Took 0.04 seconds to destroy the instance on the hypervisor. [ 631.296232] env[61972]: DEBUG oslo.service.loopingcall [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.298651] env[61972]: DEBUG nova.compute.manager [-] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 631.298754] env[61972]: DEBUG nova.network.neutron [-] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 631.322044] env[61972]: DEBUG nova.network.neutron [-] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.569691] env[61972]: DEBUG oslo_concurrency.lockutils [None req-880c4a0a-eac5-4d7c-8d81-9bd32e8c89d1 tempest-ListServersNegativeTestJSON-50119990 tempest-ListServersNegativeTestJSON-50119990-project-member] Lock "56b1ea80-3109-4212-959b-0e5fb2fc66d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.058s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.610712] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0b2535-5fc7-4ed4-8033-689698567f3d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.618633] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4259fb9a-8f88-451c-a874-b64b76d130da {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.650855] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054e441b-ffce-4ec2-ac0a-0344242f674e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.659025] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e593a902-cf27-433e-a751-5b2d390f38e7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.672544] env[61972]: DEBUG nova.compute.provider_tree [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.685487] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Successfully created port: 0c829e7f-aeeb-4b66-83c9-9bfee6a34893 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.717028] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 631.824087] env[61972]: DEBUG nova.network.neutron [-] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.072711] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 632.178018] env[61972]: DEBUG nova.scheduler.client.report [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 632.329816] env[61972]: INFO nova.compute.manager [-] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Took 1.03 seconds to deallocate network for instance. [ 632.332636] env[61972]: DEBUG nova.compute.claims [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 632.332835] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.598994] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.681326] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.977s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.682155] env[61972]: ERROR nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 
a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Traceback (most recent call last): [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self.driver.spawn(context, instance, image_meta, [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] vm_ref = self.build_virtual_machine(instance, [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.682155] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] for vif in network_info: [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] return self._sync_wrapper(fn, *args, **kwargs) [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self.wait() [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self[:] = self._gt.wait() [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] return self._exit_event.wait() [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] result = hub.switch() [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 632.682777] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] return self.greenlet.switch() [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] result = function(*args, **kwargs) [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] return func(*args, **kwargs) [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] raise e [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] nwinfo = self.network_api.allocate_for_instance( [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] created_port_ids = self._update_ports_for_instance( [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] with excutils.save_and_reraise_exception(): [ 632.683161] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] self.force_reraise() [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] raise self.value [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] updated_port = self._update_port( [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] _ensure_no_port_binding_failure(port) [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 
489fc6c6-c9a5-40a8-81a4-7677f55743fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] raise exception.PortBindingFailed(port_id=port['id']) [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] nova.exception.PortBindingFailed: Binding failed for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. [ 632.683469] env[61972]: ERROR nova.compute.manager [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] [ 632.683772] env[61972]: DEBUG nova.compute.utils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Binding failed for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 632.684204] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.669s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.690129] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Build of instance 489fc6c6-c9a5-40a8-81a4-7677f55743fe was re-scheduled: Binding failed for port a6d2c7c6-3bbb-406e-8dfe-a0dea8a8684f, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 632.690129] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 632.690129] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Acquiring lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.690129] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Acquired lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.690294] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.725847] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 632.755094] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.755094] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.755094] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.755415] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 632.755415] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.755415] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.755415] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.755415] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 632.755617] env[61972]: DEBUG nova.virt.hardware [None 
req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 632.755617] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.755964] env[61972]: DEBUG nova.virt.hardware [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.757495] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12a7512-1cab-43bb-856e-6bfafc193ca2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.767794] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fbf4a6-8870-4d22-9ab7-95de34fe761b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.042636] env[61972]: DEBUG nova.compute.manager [req-e76c0352-6be6-447a-bc18-b027eea6d431 req-b1ba568f-b6f6-48ce-bd1b-5b055f9f473f service nova] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Received event network-changed-0c829e7f-aeeb-4b66-83c9-9bfee6a34893 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 633.043150] env[61972]: DEBUG nova.compute.manager [req-e76c0352-6be6-447a-bc18-b027eea6d431 req-b1ba568f-b6f6-48ce-bd1b-5b055f9f473f service nova] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Refreshing instance network info cache due to event network-changed-0c829e7f-aeeb-4b66-83c9-9bfee6a34893. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 633.043507] env[61972]: DEBUG oslo_concurrency.lockutils [req-e76c0352-6be6-447a-bc18-b027eea6d431 req-b1ba568f-b6f6-48ce-bd1b-5b055f9f473f service nova] Acquiring lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.043632] env[61972]: DEBUG oslo_concurrency.lockutils [req-e76c0352-6be6-447a-bc18-b027eea6d431 req-b1ba568f-b6f6-48ce-bd1b-5b055f9f473f service nova] Acquired lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.043876] env[61972]: DEBUG nova.network.neutron [req-e76c0352-6be6-447a-bc18-b027eea6d431 req-b1ba568f-b6f6-48ce-bd1b-5b055f9f473f service nova] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Refreshing network info cache for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 633.070161] env[61972]: ERROR nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. [ 633.070161] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 633.070161] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 633.070161] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 633.070161] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.070161] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 633.070161] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.070161] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 633.070161] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.070161] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 633.070161] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.070161] env[61972]: ERROR nova.compute.manager raise self.value [ 633.070161] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.070161] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 633.070161] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.070161] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 633.070571] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.070571] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 633.070571] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. [ 633.070571] env[61972]: ERROR nova.compute.manager [ 633.070571] env[61972]: Traceback (most recent call last): [ 633.070571] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 633.070571] env[61972]: listener.cb(fileno) [ 633.070571] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.070571] env[61972]: result = function(*args, **kwargs) [ 633.070571] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 633.070571] env[61972]: return func(*args, **kwargs) [ 633.070571] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 633.070571] env[61972]: raise e [ 633.070571] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 633.070571] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 633.070571] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.070571] env[61972]: created_port_ids = self._update_ports_for_instance( [ 633.070571] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.070571] env[61972]: with excutils.save_and_reraise_exception(): [ 633.070571] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.070571] env[61972]: self.force_reraise() [ 633.070571] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.070571] env[61972]: raise self.value [ 633.070571] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.070571] env[61972]: updated_port = self._update_port( [ 633.070571] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.070571] env[61972]: _ensure_no_port_binding_failure(port) [ 633.070571] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.070571] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 633.071298] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. [ 633.071298] env[61972]: Removing descriptor: 21 [ 633.071298] env[61972]: ERROR nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. 
[ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Traceback (most recent call last): [ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] yield resources [ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self.driver.spawn(context, instance, image_meta, [ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.071298] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] vm_ref = self.build_virtual_machine(instance, [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] for vif in network_info: [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] return self._sync_wrapper(fn, *args, **kwargs) [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self.wait() [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self[:] = self._gt.wait() [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] return self._exit_event.wait() [ 633.071622] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.072013] env[61972]: ERROR 
nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] result = hub.switch() [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] return self.greenlet.switch() [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] result = function(*args, **kwargs) [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] return func(*args, **kwargs) [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] raise e [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] nwinfo = self.network_api.allocate_for_instance( [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.072013] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] created_port_ids = self._update_ports_for_instance( [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] with excutils.save_and_reraise_exception(): [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self.force_reraise() [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] raise self.value [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] updated_port = self._update_port( [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.072375] 
env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] _ensure_no_port_binding_failure(port) [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.072375] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] raise exception.PortBindingFailed(port_id=port['id']) [ 633.072704] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] nova.exception.PortBindingFailed: Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. [ 633.072704] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] [ 633.072704] env[61972]: INFO nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Terminating instance [ 633.206849] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.284878] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.569192] env[61972]: DEBUG nova.network.neutron [req-e76c0352-6be6-447a-bc18-b027eea6d431 req-b1ba568f-b6f6-48ce-bd1b-5b055f9f473f service nova] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.574878] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.581020] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9899280-ee93-4598-87a1-a8456f50dbf1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.587791] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f19f5a7-ce4e-45a3-815c-2e0fcad0dbf9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.631273] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a9ac8a-341d-4a77-8e3a-3b896c410085 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.639294] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519379e0-ed6e-47a9-9825-80168b65482c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.657175] env[61972]: DEBUG nova.compute.provider_tree [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.721217] env[61972]: DEBUG nova.network.neutron [req-e76c0352-6be6-447a-bc18-b027eea6d431 req-b1ba568f-b6f6-48ce-bd1b-5b055f9f473f service nova] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.788645] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Releasing lock "refresh_cache-489fc6c6-c9a5-40a8-81a4-7677f55743fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.788905] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 633.789165] env[61972]: DEBUG nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 633.789278] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.810862] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.160571] env[61972]: DEBUG nova.scheduler.client.report [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 634.223521] env[61972]: DEBUG oslo_concurrency.lockutils [req-e76c0352-6be6-447a-bc18-b027eea6d431 req-b1ba568f-b6f6-48ce-bd1b-5b055f9f473f service nova] Releasing lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.223946] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquired lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.224186] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.314938] env[61972]: DEBUG nova.network.neutron [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.553280] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b 
tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Acquiring lock "bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.553560] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Lock "bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.666879] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.983s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.667905] env[61972]: ERROR nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Traceback (most recent call last): [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self.driver.spawn(context, instance, image_meta, [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] vm_ref = self.build_virtual_machine(instance, [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 634.667905] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] for vif in network_info: [ 634.668213] env[61972]: ERROR nova.compute.manager 
[instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] return self._sync_wrapper(fn, *args, **kwargs) [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self.wait() [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self[:] = self._gt.wait() [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] return self._exit_event.wait() [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] result = hub.switch() [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 634.668213] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] return self.greenlet.switch() [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] result = function(*args, **kwargs) [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] return func(*args, **kwargs) [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] raise e [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] nwinfo = self.network_api.allocate_for_instance( [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] created_port_ids = self._update_ports_for_instance( [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] 
File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] with excutils.save_and_reraise_exception(): [ 634.668635] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] self.force_reraise() [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] raise self.value [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] updated_port = self._update_port( [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] _ensure_no_port_binding_failure(port) [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] raise exception.PortBindingFailed(port_id=port['id']) [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] nova.exception.PortBindingFailed: Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. [ 634.668999] env[61972]: ERROR nova.compute.manager [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] [ 634.669321] env[61972]: DEBUG nova.compute.utils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 634.669795] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.624s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.674059] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Build of instance cf7ea49c-91ff-4c81-803c-90608c2849dc was re-scheduled: Binding failed for port 61075218-709a-43a5-a0f4-4f4c50ec0251, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 634.674457] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 634.674684] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.674830] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquired lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.674990] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.743859] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.817346] env[61972]: INFO nova.compute.manager [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] [instance: 489fc6c6-c9a5-40a8-81a4-7677f55743fe] Took 1.03 seconds to deallocate network for instance. [ 634.841097] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.113529] env[61972]: DEBUG nova.compute.manager [req-7fc6a46e-2560-42b8-85fc-8d24b43afa67 req-9768fec2-507f-44e7-b771-2d39794eebde service nova] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Received event network-vif-deleted-0c829e7f-aeeb-4b66-83c9-9bfee6a34893 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 635.214063] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.343561] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Releasing lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.343934] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 635.344188] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 635.344686] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7238180f-365c-4fff-a0e7-2c81e364e7c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.359794] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e92e742-0849-430e-a8be-77b515141bc0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.371312] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.387331] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b8e485a2-3c56-4871-be93-59359e465cd6 could not be found. [ 635.387606] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 635.387788] env[61972]: INFO nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 635.388055] env[61972]: DEBUG oslo.service.loopingcall [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.390647] env[61972]: DEBUG nova.compute.manager [-] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 635.390750] env[61972]: DEBUG nova.network.neutron [-] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.408520] env[61972]: DEBUG nova.network.neutron [-] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.609706] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3187bb-c330-4341-8a86-9156fdedb808 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.617475] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d667be16-22e4-4c67-b0e3-87167c8c96ae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.646358] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043e1f03-a44f-4658-aa99-79954e7f56fd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.653906] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f98add5-621c-419c-8f03-3d553722dd84 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.667905] env[61972]: DEBUG nova.compute.provider_tree [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.855792] env[61972]: INFO nova.scheduler.client.report [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Deleted allocations for instance 489fc6c6-c9a5-40a8-81a4-7677f55743fe [ 635.876209] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Releasing lock "refresh_cache-cf7ea49c-91ff-4c81-803c-90608c2849dc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.876447] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 635.876622] env[61972]: DEBUG nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 635.876785] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.894232] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.911015] env[61972]: DEBUG nova.network.neutron [-] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.171345] env[61972]: DEBUG nova.scheduler.client.report [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 636.364392] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1298ac61-dbd9-4296-aa35-9e8045a3e631 tempest-ImagesNegativeTestJSON-2027549822 tempest-ImagesNegativeTestJSON-2027549822-project-member] Lock "489fc6c6-c9a5-40a8-81a4-7677f55743fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.521s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.396865] env[61972]: DEBUG nova.network.neutron [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.414600] env[61972]: INFO nova.compute.manager [-] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Took 1.02 seconds to deallocate network for instance. 
[ 636.417407] env[61972]: DEBUG nova.compute.claims [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 636.417580] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.675897] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.006s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.676544] env[61972]: ERROR nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Traceback (most recent call last): [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self.driver.spawn(context, instance, image_meta, [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] vm_ref = self.build_virtual_machine(instance, [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] vif_infos = vmwarevif.get_vif_info(self._session, [ 636.676544] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] for vif in network_info: [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/model.py", line 614, in 
__iter__ [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] return self._sync_wrapper(fn, *args, **kwargs) [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self.wait() [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self[:] = self._gt.wait() [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] return self._exit_event.wait() [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] result = hub.switch() [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 636.676885] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] return self.greenlet.switch() [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] result = function(*args, **kwargs) [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] return func(*args, **kwargs) [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] raise e [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] nwinfo = self.network_api.allocate_for_instance( [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] created_port_ids = self._update_ports_for_instance( [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 636.677256] 
env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] with excutils.save_and_reraise_exception(): [ 636.677256] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] self.force_reraise() [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] raise self.value [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] updated_port = self._update_port( [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] _ensure_no_port_binding_failure(port) [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] raise exception.PortBindingFailed(port_id=port['id']) [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] nova.exception.PortBindingFailed: Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. [ 636.677613] env[61972]: ERROR nova.compute.manager [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] [ 636.678013] env[61972]: DEBUG nova.compute.utils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. 
{{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 636.678479] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.173s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.678652] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.678801] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 636.679097] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.835s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.682709] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Build of instance 81775c2c-328e-4e33-8ff7-40a9f638ec76 was re-scheduled: Binding failed for port d59aa4aa-ae41-40dd-a250-43f0ba448dc2, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 636.683122] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 636.683387] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Acquiring lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.683469] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Acquired lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.683619] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 636.685071] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28af8633-ab48-4523-9b32-163466e6f12d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.694215] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bb383f-df25-473e-9604-04c416e4a60c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.709108] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09113e81-5d44-493c-8e56-debc8b6ae5c5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.715954] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7deff4fb-ecc1-4419-a7bf-ac5c3555675f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.747683] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181396MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 636.747870] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.778282] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 
tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "036a2dfc-615d-410a-8a3f-32de621879c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.778282] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "036a2dfc-615d-410a-8a3f-32de621879c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.869251] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 636.900177] env[61972]: INFO nova.compute.manager [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: cf7ea49c-91ff-4c81-803c-90608c2849dc] Took 1.02 seconds to deallocate network for instance. [ 637.216443] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.385339] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.409390] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.596462] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b8f624-9d51-43ff-b5c5-d239f6e806a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.606261] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a2aaa1-b25e-41c5-91a9-5124b1001ee7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.634938] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2953b97-3092-4fae-9d62-56eaaca81373 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.642018] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ac36a932-b0fb-48c6-896b-9f3121a14d0f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.656545] env[61972]: DEBUG nova.compute.provider_tree [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.913432] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Releasing lock "refresh_cache-81775c2c-328e-4e33-8ff7-40a9f638ec76" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.913432] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 637.913432] env[61972]: DEBUG nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 637.913432] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 637.934364] env[61972]: INFO nova.scheduler.client.report [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Deleted allocations for instance cf7ea49c-91ff-4c81-803c-90608c2849dc [ 637.973718] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.160803] env[61972]: DEBUG nova.scheduler.client.report [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 638.447704] env[61972]: DEBUG oslo_concurrency.lockutils [None req-90686ad8-0103-4bd2-9724-5982bdad2307 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "cf7ea49c-91ff-4c81-803c-90608c2849dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.355s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.475927] env[61972]: DEBUG nova.network.neutron [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.665789] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.987s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.666470] env[61972]: ERROR nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. 
[ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Traceback (most recent call last): [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self.driver.spawn(context, instance, image_meta, [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] vm_ref = self.build_virtual_machine(instance, [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] vif_infos = vmwarevif.get_vif_info(self._session, [ 638.666470] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] for vif in network_info: [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] return self._sync_wrapper(fn, *args, **kwargs) [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self.wait() [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self[:] = self._gt.wait() [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] return self._exit_event.wait() [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] result = hub.switch() [ 638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
638.666908] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] return self.greenlet.switch() [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] result = function(*args, **kwargs) [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] return func(*args, **kwargs) [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] raise e [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] nwinfo = self.network_api.allocate_for_instance( [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] created_port_ids = self._update_ports_for_instance( [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] with excutils.save_and_reraise_exception(): [ 638.667427] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] self.force_reraise() [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] raise self.value [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] updated_port = self._update_port( [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] _ensure_no_port_binding_failure(port) [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] raise exception.PortBindingFailed(port_id=port['id']) [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] nova.exception.PortBindingFailed: Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. [ 638.667896] env[61972]: ERROR nova.compute.manager [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] [ 638.668329] env[61972]: DEBUG nova.compute.utils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 638.668392] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.342s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.671424] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Build of instance aad5f67a-ad8f-4d0d-977c-1e65ada7682a was re-scheduled: Binding failed for port f086ddb3-723b-468a-a58f-fa7730f53cae, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 638.671870] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 638.672152] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Acquiring lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.672323] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Acquired lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.672486] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 638.952210] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 638.980068] env[61972]: INFO nova.compute.manager [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] [instance: 81775c2c-328e-4e33-8ff7-40a9f638ec76] Took 1.07 seconds to deallocate network for instance. [ 639.200561] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.357859] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.474832] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.577091] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4bba7e-e9b1-42db-9293-d2fa1f3f2541 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.583614] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef40139-c7d3-49ab-a282-ee0ff793968f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.619355] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49df181e-6cf2-4e80-84eb-38ce238eb65d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.628085] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfe45b6-4932-488b-96e7-a14f843e82f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.641810] env[61972]: DEBUG nova.compute.provider_tree [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.863033] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Releasing lock "refresh_cache-aad5f67a-ad8f-4d0d-977c-1e65ada7682a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.863220] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 639.863388] env[61972]: DEBUG nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 639.863556] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 639.885840] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.015048] env[61972]: INFO nova.scheduler.client.report [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Deleted allocations for instance 81775c2c-328e-4e33-8ff7-40a9f638ec76 [ 640.145473] env[61972]: DEBUG nova.scheduler.client.report [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 640.388326] env[61972]: DEBUG nova.network.neutron [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.524392] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6388e85e-fe24-43c7-a365-3bde72dcb745 tempest-AttachInterfacesV270Test-394902659 tempest-AttachInterfacesV270Test-394902659-project-member] Lock "81775c2c-328e-4e33-8ff7-40a9f638ec76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.823s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.653642] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.985s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.654623] env[61972]: ERROR nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] Traceback (most recent call last): [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self.driver.spawn(context, instance, image_meta, [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self._vmops.spawn(context, instance, image_meta, injected_files, [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] vm_ref = self.build_virtual_machine(instance, [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] vif_infos = vmwarevif.get_vif_info(self._session, [ 640.654623] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] for vif in network_info: [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] return self._sync_wrapper(fn, *args, **kwargs) [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self.wait() [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self[:] = self._gt.wait() [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] return self._exit_event.wait() [ 
640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] result = hub.switch() [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 640.654944] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] return self.greenlet.switch() [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] result = function(*args, **kwargs) [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] return func(*args, **kwargs) [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] raise e [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] nwinfo = self.network_api.allocate_for_instance( [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] created_port_ids = self._update_ports_for_instance( [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] with excutils.save_and_reraise_exception(): [ 640.655330] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] self.force_reraise() [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] raise self.value [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] 
updated_port = self._update_port( [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] _ensure_no_port_binding_failure(port) [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] raise exception.PortBindingFailed(port_id=port['id']) [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] nova.exception.PortBindingFailed: Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. [ 640.655629] env[61972]: ERROR nova.compute.manager [instance: 98905e39-fda3-47a8-867d-130d76894e53] [ 640.655911] env[61972]: DEBUG nova.compute.utils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 640.656272] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.175s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.657704] env[61972]: INFO nova.compute.claims [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.660685] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Build of instance 98905e39-fda3-47a8-867d-130d76894e53 was re-scheduled: Binding failed for port 12773b1b-c8da-4e56-8919-b5a36759edc7, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 640.661131] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 640.661360] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.661512] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquired lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.661674] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 640.892752] env[61972]: INFO nova.compute.manager [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] [instance: aad5f67a-ad8f-4d0d-977c-1e65ada7682a] Took 1.03 seconds to deallocate network for instance. [ 641.027194] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 641.195377] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.324777] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.552947] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.828508] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Releasing lock "refresh_cache-98905e39-fda3-47a8-867d-130d76894e53" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.829829] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 641.830215] env[61972]: DEBUG nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 641.830308] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 641.861512] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.922044] env[61972]: INFO nova.scheduler.client.report [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Deleted allocations for instance aad5f67a-ad8f-4d0d-977c-1e65ada7682a [ 642.095879] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6728ab-1ad6-4a2f-98ab-f9024ad404d8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.105031] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cc3a54-2667-4476-98bd-6fa793f608c7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.139974] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3a7ddd-cf9c-4760-a2ac-aa2a3734a139 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.146696] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da820b4-5adb-4761-a8f1-d57cc60789b6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.161067] env[61972]: DEBUG nova.compute.provider_tree [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.364918] env[61972]: DEBUG nova.network.neutron [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.437220] env[61972]: DEBUG oslo_concurrency.lockutils [None req-75ccdad4-6d72-4c09-bda6-c94dd6984864 tempest-InstanceActionsV221TestJSON-31013247 tempest-InstanceActionsV221TestJSON-31013247-project-member] Lock "aad5f67a-ad8f-4d0d-977c-1e65ada7682a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.298s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.665336] env[61972]: DEBUG nova.scheduler.client.report [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 642.675910] env[61972]: DEBUG oslo_concurrency.lockutils [None 
req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "6cda8874-6af5-490a-b9a2-323992265eb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.676292] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "6cda8874-6af5-490a-b9a2-323992265eb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.867817] env[61972]: INFO nova.compute.manager [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: 98905e39-fda3-47a8-867d-130d76894e53] Took 1.04 seconds to deallocate network for instance. [ 642.941239] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 643.177976] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.179228] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 643.183431] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.008s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.469015] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.685492] env[61972]: DEBUG nova.compute.utils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 643.687146] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 643.688137] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 643.782991] env[61972]: DEBUG nova.policy [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '232b9128bfd94a52a5f9e340e821a0b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f4e99fb03d94876b605badde8dee459', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 643.904579] env[61972]: INFO nova.scheduler.client.report [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Deleted allocations for instance 98905e39-fda3-47a8-867d-130d76894e53 [ 644.139366] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbb7765-735e-4ea7-bcd5-fb7aeadcb52e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.149387] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc4c2f4-c5d9-4acf-b950-42895593e005 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.187154] env[61972]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14452632-72de-4e31-86f1-51cacb789b0b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.191654] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 644.199204] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b0e7bd-1076-4177-989f-8616ee115535 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.215021] env[61972]: DEBUG nova.compute.provider_tree [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.416295] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0448d92e-ab24-4344-854e-08959177cf9b tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "98905e39-fda3-47a8-867d-130d76894e53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.172s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.423056] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Successfully created port: 2cf67927-0ab5-4a4f-8ae0-29554256ae47 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 644.722166] env[61972]: DEBUG nova.scheduler.client.report [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 644.921595] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 645.207539] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 645.224891] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.041s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.225539] env[61972]: ERROR nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Traceback (most recent call last): [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self.driver.spawn(context, instance, image_meta, [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] vm_ref = self.build_virtual_machine(instance, [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.225539] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] for vif in network_info: [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] return self._sync_wrapper(fn, *args, **kwargs) [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self.wait() [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 645.225854] env[61972]: ERROR nova.compute.manager 
[instance: a48c6a10-0c00-40f8-831f-713213390dfa] self[:] = self._gt.wait() [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] return self._exit_event.wait() [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] result = hub.switch() [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 645.225854] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] return self.greenlet.switch() [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] result = function(*args, **kwargs) [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] return func(*args, **kwargs) [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] raise e [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] nwinfo = self.network_api.allocate_for_instance( [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] created_port_ids = self._update_ports_for_instance( [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] with excutils.save_and_reraise_exception(): [ 645.226227] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] self.force_reraise() [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] raise self.value [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] updated_port = self._update_port( [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] _ensure_no_port_binding_failure(port) [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] raise exception.PortBindingFailed(port_id=port['id']) [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] nova.exception.PortBindingFailed: Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. [ 645.226585] env[61972]: ERROR nova.compute.manager [instance: a48c6a10-0c00-40f8-831f-713213390dfa] [ 645.226860] env[61972]: DEBUG nova.compute.utils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 645.227584] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.895s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.232696] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Build of instance a48c6a10-0c00-40f8-831f-713213390dfa was re-scheduled: Binding failed for port d655116c-27da-4401-8efc-03026926cbe0, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 645.233626] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 645.233626] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.233626] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquired lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.234113] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 645.241177] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 645.241398] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 645.241543] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 645.241715] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 645.241887] 
env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 645.242243] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 645.243475] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 645.243475] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 645.243475] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 645.243475] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 645.243475] env[61972]: DEBUG nova.virt.hardware [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 645.244497] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703d2f4c-844c-4555-a4ab-78ea69f20c98 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.256564] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ba68bb-495c-4246-a681-728890cd3eb7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.383966] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "5b7223bd-66f3-44ec-b3bc-e9072eca515e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.384883] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 
tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "5b7223bd-66f3-44ec-b3bc-e9072eca515e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.450956] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.772683] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.930300] env[61972]: DEBUG nova.compute.manager [req-aaa2a12f-066f-40f4-9f11-8af6906be90b req-509e68b9-bf72-4964-9877-a242783d9523 service nova] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Received event network-changed-2cf67927-0ab5-4a4f-8ae0-29554256ae47 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 645.930611] env[61972]: DEBUG nova.compute.manager [req-aaa2a12f-066f-40f4-9f11-8af6906be90b req-509e68b9-bf72-4964-9877-a242783d9523 service nova] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Refreshing instance network info cache due to event network-changed-2cf67927-0ab5-4a4f-8ae0-29554256ae47. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 645.930987] env[61972]: DEBUG oslo_concurrency.lockutils [req-aaa2a12f-066f-40f4-9f11-8af6906be90b req-509e68b9-bf72-4964-9877-a242783d9523 service nova] Acquiring lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.931237] env[61972]: DEBUG oslo_concurrency.lockutils [req-aaa2a12f-066f-40f4-9f11-8af6906be90b req-509e68b9-bf72-4964-9877-a242783d9523 service nova] Acquired lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.931680] env[61972]: DEBUG nova.network.neutron [req-aaa2a12f-066f-40f4-9f11-8af6906be90b req-509e68b9-bf72-4964-9877-a242783d9523 service nova] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Refreshing network info cache for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 646.000770] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.220993] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fca5b9-585f-42f8-a38a-dd9d2d22f499 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.232535] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6faf664e-e105-4173-be23-84d8501525ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.264983] env[61972]: ERROR nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. 
[ 646.264983] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 646.264983] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 646.264983] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 646.264983] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.264983] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 646.264983] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.264983] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 646.264983] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.264983] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 646.264983] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.264983] env[61972]: ERROR nova.compute.manager raise self.value [ 646.264983] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.264983] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 646.264983] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.264983] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 646.265569] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.265569] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 646.265569] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. 
[ 646.265569] env[61972]: ERROR nova.compute.manager [ 646.265569] env[61972]: Traceback (most recent call last): [ 646.265569] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 646.265569] env[61972]: listener.cb(fileno) [ 646.265569] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.265569] env[61972]: result = function(*args, **kwargs) [ 646.265569] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 646.265569] env[61972]: return func(*args, **kwargs) [ 646.265569] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 646.265569] env[61972]: raise e [ 646.265569] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 646.265569] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 646.265569] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.265569] env[61972]: created_port_ids = self._update_ports_for_instance( [ 646.265569] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.265569] env[61972]: with excutils.save_and_reraise_exception(): [ 646.265569] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.265569] env[61972]: self.force_reraise() [ 646.265569] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.265569] env[61972]: raise self.value [ 646.265569] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.265569] env[61972]: updated_port = self._update_port( [ 646.265569] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.265569] env[61972]: _ensure_no_port_binding_failure(port) [ 646.265569] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.265569] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 646.266307] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. [ 646.266307] env[61972]: Removing descriptor: 21 [ 646.266307] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdea499a-5e01-48ac-ae92-d21d814e05ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.269253] env[61972]: ERROR nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. 
[ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Traceback (most recent call last): [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] yield resources [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self.driver.spawn(context, instance, image_meta, [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self._vmops.spawn(context, instance, image_meta, injected_files, [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] vm_ref = self.build_virtual_machine(instance, [ 646.269253] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] vif_infos = vmwarevif.get_vif_info(self._session, [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] for vif in network_info: [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] return self._sync_wrapper(fn, *args, **kwargs) [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self.wait() [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self[:] = self._gt.wait() [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] return self._exit_event.wait() [ 646.269590] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 646.269590] env[61972]: ERROR 
nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] result = hub.switch() [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] return self.greenlet.switch() [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] result = function(*args, **kwargs) [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] return func(*args, **kwargs) [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] raise e [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] nwinfo = self.network_api.allocate_for_instance( [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] created_port_ids = self._update_ports_for_instance( [ 646.269936] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] with excutils.save_and_reraise_exception(): [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self.force_reraise() [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] raise self.value [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] updated_port = self._update_port( [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.270679] 
env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] _ensure_no_port_binding_failure(port) [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] raise exception.PortBindingFailed(port_id=port['id']) [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] nova.exception.PortBindingFailed: Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. [ 646.270679] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] [ 646.271158] env[61972]: INFO nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Terminating instance [ 646.277230] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bbc8a3-ba3a-40ff-86da-77810d74602e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.292110] env[61972]: DEBUG nova.compute.provider_tree [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.466125] env[61972]: DEBUG nova.network.neutron [req-aaa2a12f-066f-40f4-9f11-8af6906be90b req-509e68b9-bf72-4964-9877-a242783d9523 service nova] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.506804] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Releasing lock "refresh_cache-a48c6a10-0c00-40f8-831f-713213390dfa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.507062] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 646.507920] env[61972]: DEBUG nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 646.507920] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 646.544889] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.580300] env[61972]: DEBUG nova.network.neutron [req-aaa2a12f-066f-40f4-9f11-8af6906be90b req-509e68b9-bf72-4964-9877-a242783d9523 service nova] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.777443] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.795833] env[61972]: DEBUG nova.scheduler.client.report [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 647.049316] env[61972]: DEBUG nova.network.neutron [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.083933] env[61972]: DEBUG oslo_concurrency.lockutils [req-aaa2a12f-066f-40f4-9f11-8af6906be90b req-509e68b9-bf72-4964-9877-a242783d9523 service nova] Releasing lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.084219] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 
tempest-MigrationsAdminTest-1934187052-project-member] Acquired lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.084346] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.302892] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.075s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.303972] env[61972]: ERROR nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Traceback (most recent call last): [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self.driver.spawn(context, instance, image_meta, [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] vm_ref = self.build_virtual_machine(instance, [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.303972] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] for vif in network_info: [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] return self._sync_wrapper(fn, 
*args, **kwargs) [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self.wait() [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self[:] = self._gt.wait() [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] return self._exit_event.wait() [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] result = hub.switch() [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 647.304367] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] return self.greenlet.switch() [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] result = function(*args, **kwargs) [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] return func(*args, **kwargs) [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] raise e [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] nwinfo = self.network_api.allocate_for_instance( [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] created_port_ids = self._update_ports_for_instance( [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.304707] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] with excutils.save_and_reraise_exception(): [ 647.304707] 
env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] self.force_reraise() [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] raise self.value [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] updated_port = self._update_port( [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] _ensure_no_port_binding_failure(port) [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] raise exception.PortBindingFailed(port_id=port['id']) [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] nova.exception.PortBindingFailed: Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. [ 647.305104] env[61972]: ERROR nova.compute.manager [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] [ 647.305451] env[61972]: DEBUG nova.compute.utils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. 
{{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 647.306273] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.707s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.307720] env[61972]: INFO nova.compute.claims [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.312058] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Build of instance 3f4a6562-4c28-479c-8665-fb61c2d64dae was re-scheduled: Binding failed for port 9f77ad1b-ec8f-4cc9-930e-3b06113ed995, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 647.312532] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 647.312894] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Acquiring lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.312992] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Acquired lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.313168] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.551387] env[61972]: INFO nova.compute.manager [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a48c6a10-0c00-40f8-831f-713213390dfa] Took 1.04 seconds to deallocate network for instance. [ 647.606695] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.683348] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.846869] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.008161] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.185179] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Releasing lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.185720] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 648.185974] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 648.186388] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f0521eb-24aa-483c-9a9c-262bac0f31f9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.196241] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58875f9-2429-458d-8a95-5f627551ca2f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.225703] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d32ec82-e623-4bbb-93c2-d39c934b4890 could not be found. 
[ 648.226284] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 648.227317] env[61972]: INFO nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Took 0.04 seconds to destroy the instance on the hypervisor. [ 648.227583] env[61972]: DEBUG oslo.service.loopingcall [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 648.228416] env[61972]: DEBUG nova.compute.manager [-] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 648.228573] env[61972]: DEBUG nova.network.neutron [-] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 648.253762] env[61972]: DEBUG nova.network.neutron [-] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.270220] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "dab76349-85ba-4513-afa7-d9a33da1b1fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.270894] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "dab76349-85ba-4513-afa7-d9a33da1b1fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.474325] env[61972]: DEBUG nova.compute.manager [req-c84c12b3-5c98-4907-a589-61e32267a515 req-28b47d3c-b850-46ba-a711-c64a5d3c8d07 service nova] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Received event network-vif-deleted-2cf67927-0ab5-4a4f-8ae0-29554256ae47 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 648.512803] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Releasing lock "refresh_cache-3f4a6562-4c28-479c-8665-fb61c2d64dae" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.513200] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa 
tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 648.513386] env[61972]: DEBUG nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 648.513856] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 648.533520] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.577934] env[61972]: INFO nova.scheduler.client.report [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Deleted allocations for instance a48c6a10-0c00-40f8-831f-713213390dfa [ 648.748652] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "b986f147-a782-467c-92d1-bffb6a50c450" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.748862] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "b986f147-a782-467c-92d1-bffb6a50c450" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.761772] env[61972]: DEBUG nova.network.neutron [-] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.799937] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Acquiring lock "21c83740-56b6-4cc8-b97b-2b7a00380b91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.799937] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 
tempest-InstanceActionsTestJSON-1028660373-project-member] Lock "21c83740-56b6-4cc8-b97b-2b7a00380b91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.835194] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ee49dc-0627-4add-9a54-1c0804f75df6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.843850] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1224020a-b122-463e-addc-28e66bf51931 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.877302] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68decc5-1feb-4907-af43-78c7c9251f55 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.885119] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e110d4a1-727f-43e5-80b0-281804ca80d4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.898371] env[61972]: DEBUG nova.compute.provider_tree [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.037958] env[61972]: DEBUG nova.network.neutron [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.050380] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "479b311e-e027-4724-bd8b-dffa8903b538" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.050615] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "479b311e-e027-4724-bd8b-dffa8903b538" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.094847] env[61972]: DEBUG oslo_concurrency.lockutils [None req-13f5f468-469c-4cc4-aebe-7fdaef58852b tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "a48c6a10-0c00-40f8-831f-713213390dfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.382s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.265714] env[61972]: INFO nova.compute.manager [-] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Took 1.04 seconds to deallocate network for instance. [ 649.269021] env[61972]: DEBUG nova.compute.claims [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 649.269129] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.404780] env[61972]: DEBUG nova.scheduler.client.report [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 649.541013] env[61972]: INFO nova.compute.manager [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] [instance: 3f4a6562-4c28-479c-8665-fb61c2d64dae] Took 1.03 seconds to deallocate network for instance. [ 649.598868] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 649.909969] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.910512] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 649.913069] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.495s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.006502] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "47bd9677-375a-413b-a5c5-989d491adec9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.006736] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "47bd9677-375a-413b-a5c5-989d491adec9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.122916] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.417442] env[61972]: DEBUG nova.compute.utils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.421881] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 650.422011] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 650.506499] env[61972]: DEBUG nova.policy [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3938d412f0314927ab10800e88e83d4a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '860e3f76627645c6afc50616f17e50da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 650.575369] env[61972]: INFO nova.scheduler.client.report [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Deleted allocations for instance 3f4a6562-4c28-479c-8665-fb61c2d64dae [ 650.849172] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70f7e07-7789-4e17-9432-6b177cae1442 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.857888] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099bb5bc-e1c7-421e-b8a2-0aa0b0938eda {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.889894] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a311f8c-31e6-45e2-b651-504fccffeca8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.897465] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9517528-db6d-4480-9958-df22c181e69b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.910797] env[61972]: DEBUG nova.compute.provider_tree [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.922646] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 651.087500] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f33b26-a40c-4e48-8175-ee9f2b1ef5fa tempest-ServerActionsTestOtherA-1518282946 tempest-ServerActionsTestOtherA-1518282946-project-member] Lock "3f4a6562-4c28-479c-8665-fb61c2d64dae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.761s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.103576] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Successfully created port: 9ae03200-5487-4d70-abc9-a634277bfddc {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.414653] env[61972]: DEBUG nova.scheduler.client.report [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 651.589588] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 651.920830] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.007s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.921369] env[61972]: ERROR nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. 
[ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Traceback (most recent call last): [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self.driver.spawn(context, instance, image_meta, [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] vm_ref = self.build_virtual_machine(instance, [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 651.921369] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] for vif in network_info: [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] return self._sync_wrapper(fn, *args, **kwargs) [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self.wait() [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self[:] = self._gt.wait() [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] return self._exit_event.wait() [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] result = hub.switch() [ 651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
651.921789] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] return self.greenlet.switch() [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] result = function(*args, **kwargs) [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] return func(*args, **kwargs) [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] raise e [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] nwinfo = self.network_api.allocate_for_instance( [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] created_port_ids = self._update_ports_for_instance( [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] with excutils.save_and_reraise_exception(): [ 651.922316] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] self.force_reraise() [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] raise self.value [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] updated_port = self._update_port( [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] _ensure_no_port_binding_failure(port) [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] raise exception.PortBindingFailed(port_id=port['id']) [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] nova.exception.PortBindingFailed: Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. [ 651.922781] env[61972]: ERROR nova.compute.manager [instance: b8e485a2-3c56-4871-be93-59359e465cd6] [ 651.923242] env[61972]: DEBUG nova.compute.utils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 651.924870] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.177s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.929264] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Build of instance b8e485a2-3c56-4871-be93-59359e465cd6 was re-scheduled: Binding failed for port 0c829e7f-aeeb-4b66-83c9-9bfee6a34893, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 651.929264] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 651.929264] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.929264] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquired lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.930447] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 651.937256] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 
0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 651.991414] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 651.991939] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 651.992287] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.992589] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 651.992921] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.996019] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 651.996019] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 651.996019] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
651.996019] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 651.996019] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 651.996363] env[61972]: DEBUG nova.virt.hardware [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 651.996363] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b48305c-c891-4220-a7e8-15828b79de7b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.005626] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c22237-2f50-4456-8ee8-83b5dc66bad8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.113705] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.405822] env[61972]: DEBUG nova.compute.manager [req-e5a21e64-a110-4f07-8325-8136d084cb03 req-2579504a-3946-4677-98ad-422d13a59cbc service nova] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Received event network-changed-9ae03200-5487-4d70-abc9-a634277bfddc {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 652.405822] env[61972]: DEBUG nova.compute.manager [req-e5a21e64-a110-4f07-8325-8136d084cb03 req-2579504a-3946-4677-98ad-422d13a59cbc service nova] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Refreshing instance network info cache due to event network-changed-9ae03200-5487-4d70-abc9-a634277bfddc. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 652.405822] env[61972]: DEBUG oslo_concurrency.lockutils [req-e5a21e64-a110-4f07-8325-8136d084cb03 req-2579504a-3946-4677-98ad-422d13a59cbc service nova] Acquiring lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.405822] env[61972]: DEBUG oslo_concurrency.lockutils [req-e5a21e64-a110-4f07-8325-8136d084cb03 req-2579504a-3946-4677-98ad-422d13a59cbc service nova] Acquired lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.405822] env[61972]: DEBUG nova.network.neutron [req-e5a21e64-a110-4f07-8325-8136d084cb03 req-2579504a-3946-4677-98ad-422d13a59cbc service nova] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Refreshing network info cache for port 9ae03200-5487-4d70-abc9-a634277bfddc {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.469604] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.539029] env[61972]: ERROR nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. 
[ 652.539029] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 652.539029] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 652.539029] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 652.539029] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.539029] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 652.539029] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.539029] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 652.539029] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.539029] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 652.539029] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.539029] env[61972]: ERROR nova.compute.manager raise self.value [ 652.539029] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.539029] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 652.539029] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.539029] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 652.539440] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.539440] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 652.539440] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. 
[ 652.539440] env[61972]: ERROR nova.compute.manager [ 652.539440] env[61972]: Traceback (most recent call last): [ 652.539440] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 652.539440] env[61972]: listener.cb(fileno) [ 652.539440] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.539440] env[61972]: result = function(*args, **kwargs) [ 652.539440] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.539440] env[61972]: return func(*args, **kwargs) [ 652.539440] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 652.539440] env[61972]: raise e [ 652.539440] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 652.539440] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 652.539440] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.539440] env[61972]: created_port_ids = self._update_ports_for_instance( [ 652.539440] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.539440] env[61972]: with excutils.save_and_reraise_exception(): [ 652.539440] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.539440] env[61972]: self.force_reraise() [ 652.539440] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.539440] env[61972]: raise self.value [ 652.539440] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.539440] env[61972]: updated_port = self._update_port( [ 652.539440] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.539440] env[61972]: _ensure_no_port_binding_failure(port) [ 652.539440] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.539440] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 652.540238] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. [ 652.540238] env[61972]: Removing descriptor: 21 [ 652.540238] env[61972]: ERROR nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. 
[ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Traceback (most recent call last): [ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] yield resources [ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self.driver.spawn(context, instance, image_meta, [ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 652.540238] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] vm_ref = self.build_virtual_machine(instance, [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] vif_infos = vmwarevif.get_vif_info(self._session, [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] for vif in network_info: [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] return self._sync_wrapper(fn, *args, **kwargs) [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self.wait() [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self[:] = self._gt.wait() [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] return self._exit_event.wait() [ 652.540655] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 652.540981] env[61972]: ERROR 
nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] result = hub.switch() [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] return self.greenlet.switch() [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] result = function(*args, **kwargs) [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] return func(*args, **kwargs) [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] raise e [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] nwinfo = self.network_api.allocate_for_instance( [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.540981] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] created_port_ids = self._update_ports_for_instance( [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] with excutils.save_and_reraise_exception(): [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self.force_reraise() [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] raise self.value [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] updated_port = self._update_port( [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.541370] 
env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] _ensure_no_port_binding_failure(port) [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.541370] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] raise exception.PortBindingFailed(port_id=port['id']) [ 652.541671] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] nova.exception.PortBindingFailed: Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. [ 652.541671] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] [ 652.541671] env[61972]: INFO nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Terminating instance [ 652.610211] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.924460] env[61972]: DEBUG nova.network.neutron [req-e5a21e64-a110-4f07-8325-8136d084cb03 req-2579504a-3946-4677-98ad-422d13a59cbc service nova] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.048402] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.114827] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Releasing lock "refresh_cache-b8e485a2-3c56-4871-be93-59359e465cd6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.115096] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 653.115287] env[61972]: DEBUG nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 653.115454] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 653.194176] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.221018] env[61972]: DEBUG nova.network.neutron [req-e5a21e64-a110-4f07-8325-8136d084cb03 req-2579504a-3946-4677-98ad-422d13a59cbc service nova] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.307999] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "a5a78743-e155-4ded-854e-822976192097" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.308324] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "a5a78743-e155-4ded-854e-822976192097" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.460032] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance b8e485a2-3c56-4871-be93-59359e465cd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.460148] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 3d32ec82-e623-4bbb-93c2-d39c934b4890 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 653.460249] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 653.697563] env[61972]: DEBUG nova.network.neutron [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.721501] env[61972]: DEBUG oslo_concurrency.lockutils [req-e5a21e64-a110-4f07-8325-8136d084cb03 req-2579504a-3946-4677-98ad-422d13a59cbc service nova] Releasing lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.722206] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquired lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.723029] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 653.963743] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 5aba271f-72bb-4847-8c87-18adda584a74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.200645] env[61972]: INFO nova.compute.manager [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: b8e485a2-3c56-4871-be93-59359e465cd6] Took 1.08 seconds to deallocate network for instance. [ 654.260082] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.360515] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.466526] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 9bfde590-fe6c-404d-88ad-9da1763c0870 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.474325] env[61972]: DEBUG nova.compute.manager [req-3bb59e6b-3bf1-4099-a655-fa43c0aec0f7 req-e90d5f00-dd3c-4f8c-8209-861a884bdddb service nova] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Received event network-vif-deleted-9ae03200-5487-4d70-abc9-a634277bfddc {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 654.866022] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Releasing lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.866022] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 654.866022] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 654.866022] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-032f7a65-4a4b-4c65-a2ef-86cdfbae7144 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.874595] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55f86cb-d0d7-4173-970b-1191a8810857 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.903462] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4 could not be found. 
[ 654.903862] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 654.905381] env[61972]: INFO nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 654.905855] env[61972]: DEBUG oslo.service.loopingcall [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.908443] env[61972]: DEBUG nova.compute.manager [-] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 654.908443] env[61972]: DEBUG nova.network.neutron [-] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 654.933877] env[61972]: DEBUG nova.network.neutron [-] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.970478] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 5c036232-736c-4c34-a2b7-7de517b9cd50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.231391] env[61972]: INFO nova.scheduler.client.report [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Deleted allocations for instance b8e485a2-3c56-4871-be93-59359e465cd6 [ 655.436325] env[61972]: DEBUG nova.network.neutron [-] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.477626] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance eeb44b48-ed08-4f20-9498-b0eed38a00a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.740457] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5313c7bc-37e4-4046-8bf0-09125fe6e96b tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "b8e485a2-3c56-4871-be93-59359e465cd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.411s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.939527] env[61972]: INFO nova.compute.manager [-] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Took 1.03 seconds to deallocate network for instance. [ 655.941804] env[61972]: DEBUG nova.compute.claims [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 655.942017] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.981139] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 503419e5-ae32-49d4-bc41-838fb3c9437e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.243658] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 656.484025] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 5300907c-d589-4ccf-a9c5-4a6bd819783b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.764366] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.825519] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Acquiring lock "67ecabfd-4efc-4e1c-a708-107197cfd018" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.827049] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Lock "67ecabfd-4efc-4e1c-a708-107197cfd018" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.988168] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 92b06621-cdaa-4723-b339-c0f698897d24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.491098] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 50e4d9d3-a17b-4bb2-9816-bb44f269370e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.994072] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 22634f52-c696-417b-bfe9-0a7ca62aad40 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 658.497435] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 2ba9f652-c274-4d79-84a2-ad1384c99b91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.000985] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 9fd9fc35-7105-4941-8e05-cf4e45bb5d29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.505100] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance e0d51c99-1916-4d66-a141-dfa5d4357174 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.006642] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance a978943b-afd3-44f4-b6c1-5a72dda8ca35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.510055] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance e0735ee2-0a9d-4291-8465-b644816bf8e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.012712] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 56488ac6-c94b-4b40-9cad-b0c36a3d293e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.519468] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 2725d6ed-89d9-479f-b6ee-d16523e0abab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.022959] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 2b0039dd-1219-465d-beb8-0262e0e40029 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.526020] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 7801858d-bc2a-466e-a6f2-a8c6b6ff4705 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.029617] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance dc5ef08a-8692-4274-84df-7c2923099249 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.532860] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance bc10dded-e669-4fdb-9f5b-cc6abc3a37c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.035892] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 036a2dfc-615d-410a-8a3f-32de621879c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.538580] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 6cda8874-6af5-490a-b9a2-323992265eb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.041925] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 5b7223bd-66f3-44ec-b3bc-e9072eca515e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.545177] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance dab76349-85ba-4513-afa7-d9a33da1b1fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.048227] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance b986f147-a782-467c-92d1-bffb6a50c450 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.550997] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 21c83740-56b6-4cc8-b97b-2b7a00380b91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.054174] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 479b311e-e027-4724-bd8b-dffa8903b538 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.556888] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 47bd9677-375a-413b-a5c5-989d491adec9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.061020] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance a5a78743-e155-4ded-854e-822976192097 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.061020] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 668.061020] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 668.433164] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b1f605-f9cb-4107-8de7-94412f74f475 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.440761] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa084026-693a-4fcd-85dd-35074df33915 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.469427] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b26729-431f-4ec5-868b-66576a8728e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.476867] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938dcc2b-2355-4a1f-ab31-e5da24a94d6d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.490061] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.992896] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 669.497868] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 669.498140] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 17.573s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.498419] env[61972]: DEBUG oslo_concurrency.lockutils [None 
req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.113s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.500017] env[61972]: INFO nova.compute.claims [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.852586] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e811b8e-5ea6-4b3a-92e4-2ba84acb2797 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.859962] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b68374-7318-4a73-98df-cf9592bfdbca {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.889099] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ea9b4d-7b0c-412f-a0cd-b27d4898bb39 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.896162] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e50d0c-365c-4018-b521-757a65f22479 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.908666] env[61972]: DEBUG nova.compute.provider_tree [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.411382] env[61972]: DEBUG nova.scheduler.client.report [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 671.917519] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.918089] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 
5aba271f-72bb-4847-8c87-18adda584a74] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 671.920919] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.446s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.922502] env[61972]: INFO nova.compute.claims [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.427185] env[61972]: DEBUG nova.compute.utils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 672.432209] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 672.432375] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 672.497567] env[61972]: DEBUG nova.policy [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f4a42241a4a4abcaf2e718eb02b9b20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '193ea940afde48b1ad27cc7713ae4213', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 672.933687] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 672.940558] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Successfully created port: dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 673.307904] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aeefcf-9a4e-4132-aa48-fa0433f17c2b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.315674] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f3810c-1614-4679-af78-67d6c596f193 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.344964] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b765bc-438d-4bd0-8a40-84eec428a097 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.352921] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e477178-e7e9-4fa5-a9ba-084ec77f9aee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.365156] env[61972]: DEBUG nova.compute.provider_tree [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.872022] env[61972]: DEBUG nova.scheduler.client.report [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 673.946210] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 673.975076] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 673.975076] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 673.975076] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 673.975273] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 673.975304] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 673.975428] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 673.975633] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 673.975788] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 673.975948] env[61972]: DEBUG nova.virt.hardware [None 
req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 673.976122] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 673.976305] env[61972]: DEBUG nova.virt.hardware [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 673.977185] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7618df07-cabb-4525-8c86-4adf47de4988 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.985513] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765b5132-72ce-427a-b847-ed593c31cfce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.232012] env[61972]: DEBUG nova.compute.manager [req-31d83077-d7e8-4a7b-a147-69fbfae832ca req-80d9d7c5-1b23-4272-b040-0966a1b88d43 service nova] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Received event network-changed-dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 674.232322] env[61972]: DEBUG nova.compute.manager [req-31d83077-d7e8-4a7b-a147-69fbfae832ca req-80d9d7c5-1b23-4272-b040-0966a1b88d43 service nova] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Refreshing instance network info cache due to event network-changed-dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 674.232457] env[61972]: DEBUG oslo_concurrency.lockutils [req-31d83077-d7e8-4a7b-a147-69fbfae832ca req-80d9d7c5-1b23-4272-b040-0966a1b88d43 service nova] Acquiring lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.232597] env[61972]: DEBUG oslo_concurrency.lockutils [req-31d83077-d7e8-4a7b-a147-69fbfae832ca req-80d9d7c5-1b23-4272-b040-0966a1b88d43 service nova] Acquired lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.232751] env[61972]: DEBUG nova.network.neutron [req-31d83077-d7e8-4a7b-a147-69fbfae832ca req-80d9d7c5-1b23-4272-b040-0966a1b88d43 service nova] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Refreshing network info cache for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.345781] env[61972]: ERROR nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. [ 674.345781] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 674.345781] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 674.345781] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 674.345781] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 674.345781] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 674.345781] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 674.345781] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 674.345781] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.345781] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 674.345781] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.345781] env[61972]: ERROR nova.compute.manager raise self.value [ 674.345781] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 674.345781] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 674.345781] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.345781] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 674.346521] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.346521] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 674.346521] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. [ 674.346521] env[61972]: ERROR nova.compute.manager [ 674.346521] env[61972]: Traceback (most recent call last): [ 674.346521] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 674.346521] env[61972]: listener.cb(fileno) [ 674.346521] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 674.346521] env[61972]: result = function(*args, **kwargs) [ 674.346521] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 674.346521] env[61972]: return func(*args, **kwargs) [ 674.346521] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 674.346521] env[61972]: raise e [ 674.346521] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 674.346521] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 674.346521] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 674.346521] env[61972]: created_port_ids = self._update_ports_for_instance( [ 674.346521] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 674.346521] env[61972]: with excutils.save_and_reraise_exception(): [ 674.346521] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.346521] env[61972]: self.force_reraise() [ 674.346521] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.346521] env[61972]: raise self.value [ 674.346521] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 674.346521] env[61972]: updated_port = self._update_port( [ 674.346521] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.346521] env[61972]: _ensure_no_port_binding_failure(port) [ 674.346521] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.346521] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 674.347786] env[61972]: nova.exception.PortBindingFailed: Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. [ 674.347786] env[61972]: Removing descriptor: 19 [ 674.347786] env[61972]: ERROR nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. 
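The traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed after Neutron returned a port whose binding could not be completed. A minimal sketch of that kind of check, assuming the port is a plain dict as returned by the Neutron v2 API; this is an illustration of the idea, not Nova's exact implementation:

    # Illustrative sketch only; not Nova's actual code. Assumes `port` is a
    # dict as returned by the Neutron v2 API, where a failed binding is
    # reported via the binding:vif_type field.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port["id"])

    try:
        ensure_no_port_binding_failure(
            {"id": "dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665",
             "binding:vif_type": "binding_failed"})
    except PortBindingFailed as exc:
        print(exc)

Seen this way, the error is purely a Neutron-side condition: the compute node only detects it when it reads the updated port back.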
[ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Traceback (most recent call last): [ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] yield resources [ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self.driver.spawn(context, instance, image_meta, [ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self._vmops.spawn(context, instance, image_meta, injected_files, [ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 674.347786] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] vm_ref = self.build_virtual_machine(instance, [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] vif_infos = vmwarevif.get_vif_info(self._session, [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] for vif in network_info: [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] return self._sync_wrapper(fn, *args, **kwargs) [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self.wait() [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self[:] = self._gt.wait() [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] return self._exit_event.wait() [ 674.348147] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 674.348480] env[61972]: ERROR 
nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] result = hub.switch() [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] return self.greenlet.switch() [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] result = function(*args, **kwargs) [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] return func(*args, **kwargs) [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] raise e [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] nwinfo = self.network_api.allocate_for_instance( [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 674.348480] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] created_port_ids = self._update_ports_for_instance( [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] with excutils.save_and_reraise_exception(): [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self.force_reraise() [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] raise self.value [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] updated_port = self._update_port( [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.348857] 
env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] _ensure_no_port_binding_failure(port) [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.348857] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] raise exception.PortBindingFailed(port_id=port['id']) [ 674.349332] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] nova.exception.PortBindingFailed: Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. [ 674.349332] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] [ 674.349332] env[61972]: INFO nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Terminating instance [ 674.374774] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.375289] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 674.378206] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.825s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.379597] env[61972]: INFO nova.compute.claims [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.749699] env[61972]: DEBUG nova.network.neutron [req-31d83077-d7e8-4a7b-a147-69fbfae832ca req-80d9d7c5-1b23-4272-b040-0966a1b88d43 service nova] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.851859] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Acquiring lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.864083] env[61972]: DEBUG nova.network.neutron [req-31d83077-d7e8-4a7b-a147-69fbfae832ca req-80d9d7c5-1b23-4272-b040-0966a1b88d43 service nova] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.883864] env[61972]: DEBUG nova.compute.utils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 674.885457] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 674.885824] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 674.924286] env[61972]: DEBUG nova.policy [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc906ad0bdfc4e21bce37c53548ac708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4c1334ef1cbd4689a365a0375582f252', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 675.304760] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Successfully created port: 753f3805-c146-4136-9b57-ec2cd4660667 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 675.367374] env[61972]: DEBUG oslo_concurrency.lockutils [req-31d83077-d7e8-4a7b-a147-69fbfae832ca req-80d9d7c5-1b23-4272-b040-0966a1b88d43 service nova] Releasing lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.367783] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Acquired lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.367967] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 675.385798] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 675.785873] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18142bea-fc2a-4c34-9db7-f957cfd92442 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.793947] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15d1b9e-10e0-49c1-bc13-9d6330fdf47b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.825702] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7aaa9c-cd1c-4c2b-af25-14b1803411e3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.832937] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e4e44a-fa63-437f-b31b-cc1b073e962c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.845708] env[61972]: DEBUG nova.compute.provider_tree [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.901243] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.033866] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.340804] env[61972]: DEBUG nova.compute.manager [req-feb6030a-8ea0-488f-a5a9-7a82eb16981b req-0f55a8be-4d5a-4cd5-a613-634aca30890a service nova] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Received event network-vif-deleted-dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 676.351021] env[61972]: DEBUG nova.scheduler.client.report [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 676.395201] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 676.425226] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 676.425226] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 676.425226] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.425226] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 676.425385] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.425385] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 676.425609] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 676.425940] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 676.426287] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] 
Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 676.426738] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 676.428351] env[61972]: DEBUG nova.virt.hardware [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.428351] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65381d6-23f7-4973-89bd-3e78e9721656 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.443127] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7d12ab-453a-421d-987d-85bee4d05140 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.537346] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Releasing lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.537796] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 676.537983] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 676.538324] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d05f3864-81bd-42fb-a9b8-5391b1edad0b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.550910] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65344431-5dfd-4bf0-a3f5-dc899f768cc6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.573310] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5aba271f-72bb-4847-8c87-18adda584a74 could not be found. 
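Just above, vmops logs a WARNING that instance 5aba271f-72bb-4847-8c87-18adda584a74 never existed on the backend and then still reports the instance as destroyed: the destroy path treats InstanceNotFound as a no-op so the rest of the cleanup (network deallocation, allocation removal) can proceed. A small sketch of that control flow, using hypothetical stand-in callables rather than the real vCenter lookups:

    # Sketch of a destroy path that tolerates a VM missing on the backend.
    # `find_vm_ref` and `destroy_vm` are hypothetical stand-ins; only the
    # control flow mirrors what the log shows.
    class InstanceNotFound(Exception):
        pass

    def destroy_instance(instance_uuid, find_vm_ref, destroy_vm, log):
        try:
            vm_ref = find_vm_ref(instance_uuid)
            if vm_ref is None:
                raise InstanceNotFound(instance_uuid)
            destroy_vm(vm_ref)
        except InstanceNotFound:
            # Nothing was ever spawned (here the port binding failed first),
            # so there is nothing to delete on the hypervisor.
            log("Instance does not exist on backend: %s" % instance_uuid)
        log("Instance destroyed")

    destroy_instance("5aba271f-72bb-4847-8c87-18adda584a74",
                     find_vm_ref=lambda uuid: None,
                     destroy_vm=lambda ref: None,
                     log=print)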
[ 676.573614] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 676.573705] env[61972]: INFO nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Took 0.04 seconds to destroy the instance on the hypervisor. [ 676.573939] env[61972]: DEBUG oslo.service.loopingcall [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 676.574173] env[61972]: DEBUG nova.compute.manager [-] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 676.574266] env[61972]: DEBUG nova.network.neutron [-] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 676.576447] env[61972]: ERROR nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. 
[ 676.576447] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 676.576447] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 676.576447] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 676.576447] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 676.576447] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 676.576447] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 676.576447] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 676.576447] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.576447] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 676.576447] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.576447] env[61972]: ERROR nova.compute.manager raise self.value [ 676.576447] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 676.576447] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 676.576447] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.576447] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 676.576950] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.576950] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 676.576950] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. 
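Both error bursts have the same shape: the network allocation runs asynchronously in a separate greenthread, so the PortBindingFailed only surfaces later, when the driver iterates network_info and the wrapper wait()s on that thread (the model.py _sync_wrapper frames in the tracebacks). The same "exception is delivered when the result is consumed" behaviour can be sketched with the standard library instead of eventlet; illustrative only, not Nova's implementation:

    # Deferred exception delivery, analogous to the greenthread wait() seen
    # in the tracebacks (standard library used here instead of eventlet).
    from concurrent.futures import ThreadPoolExecutor

    def allocate_network():
        raise RuntimeError("Binding failed for port <id>")

    executor = ThreadPoolExecutor(max_workers=1)
    future = executor.submit(allocate_network)   # spawned in the background

    # ... the build carries on; nothing has failed from the caller's view yet ...

    try:
        network_info = future.result()           # re-raises here, like wait()
    except RuntimeError as exc:
        print("failure surfaces only when the result is consumed:", exc)
    finally:
        executor.shutdown()

That is why the log shows block device mapping and spawn starting normally before the spawn aborts with the binding error.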
[ 676.576950] env[61972]: ERROR nova.compute.manager [ 676.576950] env[61972]: Traceback (most recent call last): [ 676.576950] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 676.576950] env[61972]: listener.cb(fileno) [ 676.576950] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 676.576950] env[61972]: result = function(*args, **kwargs) [ 676.576950] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 676.576950] env[61972]: return func(*args, **kwargs) [ 676.576950] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 676.576950] env[61972]: raise e [ 676.576950] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 676.576950] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 676.576950] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 676.576950] env[61972]: created_port_ids = self._update_ports_for_instance( [ 676.576950] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 676.576950] env[61972]: with excutils.save_and_reraise_exception(): [ 676.576950] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.576950] env[61972]: self.force_reraise() [ 676.576950] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.576950] env[61972]: raise self.value [ 676.576950] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 676.576950] env[61972]: updated_port = self._update_port( [ 676.576950] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.576950] env[61972]: _ensure_no_port_binding_failure(port) [ 676.576950] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.576950] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 676.577726] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. [ 676.577726] env[61972]: Removing descriptor: 19 [ 676.577726] env[61972]: ERROR nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. 
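With two instances failing identically within a couple of seconds, it can help to pull the failed port IDs out of the compute log and count repeats before digging into Neutron. A small, hypothetical triage helper (not part of Nova) that scans lines like the ones above:

    # Hypothetical log-triage helper: count "Binding failed for port" hits
    # per port UUID in a nova-compute log file.
    import re
    import sys
    from collections import Counter

    PATTERN = re.compile(r"Binding failed for port ([0-9a-f-]{36})")

    def failed_ports(path):
        counts = Counter()
        with open(path, errors="replace") as fh:
            for line in fh:
                for port_id in PATTERN.findall(line):
                    counts[port_id] += 1
        return counts

    if __name__ == "__main__":
        for port_id, n in failed_ports(sys.argv[1]).most_common():
            print(f"{n:4d}  {port_id}")

Run against this log it would report dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665 and 753f3805-c146-4136-9b57-ec2cd4660667, each repeated across the summary, traceback, and per-instance records.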
[ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Traceback (most recent call last): [ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] yield resources [ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self.driver.spawn(context, instance, image_meta, [ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self._vmops.spawn(context, instance, image_meta, injected_files, [ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 676.577726] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] vm_ref = self.build_virtual_machine(instance, [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] vif_infos = vmwarevif.get_vif_info(self._session, [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] for vif in network_info: [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] return self._sync_wrapper(fn, *args, **kwargs) [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self.wait() [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self[:] = self._gt.wait() [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] return self._exit_event.wait() [ 676.578209] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 676.578575] env[61972]: ERROR 
nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] result = hub.switch() [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] return self.greenlet.switch() [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] result = function(*args, **kwargs) [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] return func(*args, **kwargs) [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] raise e [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] nwinfo = self.network_api.allocate_for_instance( [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 676.578575] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] created_port_ids = self._update_ports_for_instance( [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] with excutils.save_and_reraise_exception(): [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self.force_reraise() [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] raise self.value [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] updated_port = self._update_port( [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.579239] 
env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] _ensure_no_port_binding_failure(port) [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.579239] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] raise exception.PortBindingFailed(port_id=port['id']) [ 676.579767] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] nova.exception.PortBindingFailed: Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. [ 676.579767] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] [ 676.579767] env[61972]: INFO nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Terminating instance [ 676.592200] env[61972]: DEBUG nova.network.neutron [-] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.853771] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.854984] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.386s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.856502] env[61972]: INFO nova.compute.claims [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.083517] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Acquiring lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.083691] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Acquired lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.083865] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
677.093877] env[61972]: DEBUG nova.network.neutron [-] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.360126] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Acquiring lock "2d2aa0b2-37aa-4284-9e60-cdfb5e069687" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.360436] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Lock "2d2aa0b2-37aa-4284-9e60-cdfb5e069687" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.596369] env[61972]: INFO nova.compute.manager [-] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Took 1.02 seconds to deallocate network for instance. [ 677.600815] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.602761] env[61972]: DEBUG nova.compute.claims [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 677.603959] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.713174] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.863399] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Lock "2d2aa0b2-37aa-4284-9e60-cdfb5e069687" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.863969] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Start building networks asynchronously for 
instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 678.200579] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da8de53-2373-4b79-9cea-61f6061a0f86 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.207836] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2f1645-586c-4334-bf46-589512166208 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.238251] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Releasing lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.238629] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 678.238816] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 678.239257] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da92b468-f463-4e84-9c2f-c67ec940140e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.241430] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc18190-3203-4ad8-82d1-47a426b76d17 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.249342] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27e9ad6-8ad1-484c-8aac-639e3924311f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.255415] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1dc66ea-54e2-4ced-9227-78407b81f728 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.276847] env[61972]: DEBUG nova.compute.provider_tree [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.282209] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9bfde590-fe6c-404d-88ad-9da1763c0870 could not be found. 
[ 678.282487] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 678.282589] env[61972]: INFO nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Took 0.04 seconds to destroy the instance on the hypervisor. [ 678.282771] env[61972]: DEBUG oslo.service.loopingcall [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 678.283248] env[61972]: DEBUG nova.compute.manager [-] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 678.283343] env[61972]: DEBUG nova.network.neutron [-] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 678.300530] env[61972]: DEBUG nova.network.neutron [-] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.371577] env[61972]: DEBUG nova.compute.utils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 678.372894] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 678.373095] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 678.404516] env[61972]: DEBUG nova.compute.manager [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Received event network-changed-753f3805-c146-4136-9b57-ec2cd4660667 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 678.404750] env[61972]: DEBUG nova.compute.manager [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Refreshing instance network info cache due to event network-changed-753f3805-c146-4136-9b57-ec2cd4660667. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 678.405027] env[61972]: DEBUG oslo_concurrency.lockutils [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] Acquiring lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.405129] env[61972]: DEBUG oslo_concurrency.lockutils [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] Acquired lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.405271] env[61972]: DEBUG nova.network.neutron [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Refreshing network info cache for port 753f3805-c146-4136-9b57-ec2cd4660667 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 678.418329] env[61972]: DEBUG nova.policy [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa1286fe09cf48d19a1d528ac1b812fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02baa466ae264d89896a3049ab5767be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 678.774079] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Successfully created port: be72ce58-4074-47db-9ae6-d521a1242017 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 678.785516] env[61972]: DEBUG nova.scheduler.client.report [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 678.803010] env[61972]: DEBUG nova.network.neutron [-] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.875702] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 678.935018] env[61972]: DEBUG nova.network.neutron [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.239678] env[61972]: DEBUG nova.network.neutron [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.289571] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.290113] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 679.293192] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.842s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.296207] env[61972]: INFO nova.compute.claims [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.309741] env[61972]: INFO nova.compute.manager [-] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Took 1.03 seconds to deallocate network for instance. 
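In the lockutils DEBUG lines above, "waited" is the time a caller blocked before acquiring the named lock (e.g. 33.842s on "compute_resources") and "held" is the time from acquisition to release (e.g. 2.435s). A small sketch of that bookkeeping, using plain threading as a stand-in for oslo_concurrency.lockutils:

import threading
import time

_locks = {}

def timed_lock(name, func):
    # Acquire the named lock, timing how long we blocked ("waited") and how
    # long we kept it ("held"), mirroring the DEBUG output above.
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        return func()
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" released :: held {held:.3f}s')

# Example: a claim that holds "compute_resources" for ~0.1s.
timed_lock("compute_resources", lambda: time.sleep(0.1))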
[ 679.312288] env[61972]: DEBUG nova.compute.claims [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 679.312462] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.746375] env[61972]: DEBUG oslo_concurrency.lockutils [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] Releasing lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.746655] env[61972]: DEBUG nova.compute.manager [req-fefd3017-ffeb-4df5-bea9-e0624d931f98 req-39daa0ee-5e58-4f66-a93e-aaba2dc0559a service nova] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Received event network-vif-deleted-753f3805-c146-4136-9b57-ec2cd4660667 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 679.795569] env[61972]: DEBUG nova.compute.utils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 679.796946] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 679.797131] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 679.886400] env[61972]: DEBUG nova.policy [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e11ef5e6a8a34d10a106be27e05d3511', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a44012a49e94a00b3b8dffe9cd66842', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 679.888744] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 679.915431] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 679.915704] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 679.915859] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 679.916081] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 679.916190] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 679.916396] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 679.916533] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 679.916684] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 679.917071] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 
tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 679.917071] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 679.917190] env[61972]: DEBUG nova.virt.hardware [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 679.918068] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fcac97e-7f37-4739-b81c-70a841e203f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.930868] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605c9e51-66b8-4c73-9823-6841d972318c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.304229] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 680.446398] env[61972]: DEBUG nova.compute.manager [req-e49cad96-d833-4214-a564-821762269ed7 req-268dc734-61dc-4280-961e-7ab1af95cae3 service nova] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Received event network-changed-be72ce58-4074-47db-9ae6-d521a1242017 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 680.446584] env[61972]: DEBUG nova.compute.manager [req-e49cad96-d833-4214-a564-821762269ed7 req-268dc734-61dc-4280-961e-7ab1af95cae3 service nova] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Refreshing instance network info cache due to event network-changed-be72ce58-4074-47db-9ae6-d521a1242017. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 680.446808] env[61972]: DEBUG oslo_concurrency.lockutils [req-e49cad96-d833-4214-a564-821762269ed7 req-268dc734-61dc-4280-961e-7ab1af95cae3 service nova] Acquiring lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.446927] env[61972]: DEBUG oslo_concurrency.lockutils [req-e49cad96-d833-4214-a564-821762269ed7 req-268dc734-61dc-4280-961e-7ab1af95cae3 service nova] Acquired lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.447092] env[61972]: DEBUG nova.network.neutron [req-e49cad96-d833-4214-a564-821762269ed7 req-268dc734-61dc-4280-961e-7ab1af95cae3 service nova] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Refreshing network info cache for port be72ce58-4074-47db-9ae6-d521a1242017 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.448694] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Successfully created port: 04b06b41-42b5-4d04-ba77-d7a56bbce454 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.503045] env[61972]: ERROR nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. 
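When one of these "please check neutron logs for more information" failures appears, the port's binding state can also be read back from the Neutron API to confirm which host the bind was attempted on. A hedged sketch using openstacksdk; the cloud name and the binding_* attribute names are assumptions, adjust to the local environment:

import openstack

# Assumed clouds.yaml entry; replace with the deployment's cloud name.
conn = openstack.connect(cloud='devstack')

port = conn.network.get_port('be72ce58-4074-47db-9ae6-d521a1242017')
# On a failed binding the vif type is typically 'binding_failed', and the
# Neutron server/agent logs for the bound host explain why.
print(port.binding_vif_type, port.binding_host_id)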
[ 680.503045] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 680.503045] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 680.503045] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 680.503045] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.503045] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 680.503045] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.503045] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 680.503045] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.503045] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 680.503045] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.503045] env[61972]: ERROR nova.compute.manager raise self.value [ 680.503045] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.503045] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 680.503045] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.503045] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 680.503537] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.503537] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 680.503537] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. 
[ 680.503537] env[61972]: ERROR nova.compute.manager [ 680.503537] env[61972]: Traceback (most recent call last): [ 680.503537] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 680.503537] env[61972]: listener.cb(fileno) [ 680.503537] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.503537] env[61972]: result = function(*args, **kwargs) [ 680.503537] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 680.503537] env[61972]: return func(*args, **kwargs) [ 680.503537] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 680.503537] env[61972]: raise e [ 680.503537] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 680.503537] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 680.503537] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.503537] env[61972]: created_port_ids = self._update_ports_for_instance( [ 680.503537] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.503537] env[61972]: with excutils.save_and_reraise_exception(): [ 680.503537] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.503537] env[61972]: self.force_reraise() [ 680.503537] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.503537] env[61972]: raise self.value [ 680.503537] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.503537] env[61972]: updated_port = self._update_port( [ 680.503537] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.503537] env[61972]: _ensure_no_port_binding_failure(port) [ 680.503537] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.503537] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 680.504374] env[61972]: nova.exception.PortBindingFailed: Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. [ 680.504374] env[61972]: Removing descriptor: 21 [ 680.504374] env[61972]: ERROR nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. 
[ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Traceback (most recent call last): [ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] yield resources [ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self.driver.spawn(context, instance, image_meta, [ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self._vmops.spawn(context, instance, image_meta, injected_files, [ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 680.504374] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] vm_ref = self.build_virtual_machine(instance, [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] vif_infos = vmwarevif.get_vif_info(self._session, [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] for vif in network_info: [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] return self._sync_wrapper(fn, *args, **kwargs) [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self.wait() [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self[:] = self._gt.wait() [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] return self._exit_event.wait() [ 680.504731] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 680.505116] env[61972]: ERROR 
nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] result = hub.switch() [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] return self.greenlet.switch() [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] result = function(*args, **kwargs) [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] return func(*args, **kwargs) [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] raise e [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] nwinfo = self.network_api.allocate_for_instance( [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.505116] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] created_port_ids = self._update_ports_for_instance( [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] with excutils.save_and_reraise_exception(): [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self.force_reraise() [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] raise self.value [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] updated_port = self._update_port( [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.505541] 
env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] _ensure_no_port_binding_failure(port) [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.505541] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] raise exception.PortBindingFailed(port_id=port['id']) [ 680.505878] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] nova.exception.PortBindingFailed: Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. [ 680.505878] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] [ 680.505878] env[61972]: INFO nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Terminating instance [ 680.719981] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5675d3-439e-471f-8f32-85e8eeea4e64 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.731690] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b003ea33-2e01-456e-8326-f65936bb99ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.763786] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e912daa2-dc3b-49de-a5a2-d64648a3b67c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.771415] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b246fd-d05b-4a92-a632-d127a6d1e84e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.787970] env[61972]: DEBUG nova.compute.provider_tree [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.988821] env[61972]: DEBUG nova.network.neutron [req-e49cad96-d833-4214-a564-821762269ed7 req-268dc734-61dc-4280-961e-7ab1af95cae3 service nova] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.007303] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Acquiring lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.194595] env[61972]: DEBUG nova.network.neutron [req-e49cad96-d833-4214-a564-821762269ed7 req-268dc734-61dc-4280-961e-7ab1af95cae3 service nova] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.290943] env[61972]: DEBUG nova.scheduler.client.report [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 681.315334] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 681.340377] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 681.340637] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 681.340790] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.340961] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 681.341122] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.341263] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 681.341585] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 681.341762] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 681.341929] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 
tempest-ServerTagsTestJSON-273912885-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 681.342380] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 681.343065] env[61972]: DEBUG nova.virt.hardware [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 681.343774] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a0135b-9cb1-4817-9e6f-5ce0d9b41558 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.353513] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff1f6492-31b1-4e34-a1ce-f1647c75506d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.696745] env[61972]: DEBUG oslo_concurrency.lockutils [req-e49cad96-d833-4214-a564-821762269ed7 req-268dc734-61dc-4280-961e-7ab1af95cae3 service nova] Releasing lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.697196] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Acquired lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.697380] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.787726] env[61972]: ERROR nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. 
[ 681.787726] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 681.787726] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 681.787726] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 681.787726] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 681.787726] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 681.787726] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 681.787726] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 681.787726] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.787726] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 681.787726] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.787726] env[61972]: ERROR nova.compute.manager raise self.value [ 681.787726] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 681.787726] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 681.787726] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.787726] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 681.788407] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.788407] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 681.788407] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. 
[ 681.788407] env[61972]: ERROR nova.compute.manager [ 681.788407] env[61972]: Traceback (most recent call last): [ 681.788407] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 681.788407] env[61972]: listener.cb(fileno) [ 681.788407] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.788407] env[61972]: result = function(*args, **kwargs) [ 681.788407] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 681.788407] env[61972]: return func(*args, **kwargs) [ 681.788407] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 681.788407] env[61972]: raise e [ 681.788407] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 681.788407] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 681.788407] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 681.788407] env[61972]: created_port_ids = self._update_ports_for_instance( [ 681.788407] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 681.788407] env[61972]: with excutils.save_and_reraise_exception(): [ 681.788407] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.788407] env[61972]: self.force_reraise() [ 681.788407] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.788407] env[61972]: raise self.value [ 681.788407] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 681.788407] env[61972]: updated_port = self._update_port( [ 681.788407] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.788407] env[61972]: _ensure_no_port_binding_failure(port) [ 681.788407] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.788407] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 681.789287] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. [ 681.789287] env[61972]: Removing descriptor: 19 [ 681.789287] env[61972]: ERROR nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. 
[ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Traceback (most recent call last): [ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] yield resources [ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self.driver.spawn(context, instance, image_meta, [ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.789287] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] vm_ref = self.build_virtual_machine(instance, [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] for vif in network_info: [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] return self._sync_wrapper(fn, *args, **kwargs) [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self.wait() [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self[:] = self._gt.wait() [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] return self._exit_event.wait() [ 681.789631] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 681.790018] env[61972]: ERROR 
nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] result = hub.switch() [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] return self.greenlet.switch() [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] result = function(*args, **kwargs) [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] return func(*args, **kwargs) [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] raise e [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] nwinfo = self.network_api.allocate_for_instance( [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 681.790018] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] created_port_ids = self._update_ports_for_instance( [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] with excutils.save_and_reraise_exception(): [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self.force_reraise() [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] raise self.value [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] updated_port = self._update_port( [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.790390] 
env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] _ensure_no_port_binding_failure(port) [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.790390] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] raise exception.PortBindingFailed(port_id=port['id']) [ 681.790730] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] nova.exception.PortBindingFailed: Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. [ 681.790730] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] [ 681.790730] env[61972]: INFO nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Terminating instance [ 681.798283] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.505s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.798730] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 681.802026] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 32.533s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.215547] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.292553] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Acquiring lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.292795] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Acquired lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.292980] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.308752] env[61972]: DEBUG nova.compute.utils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 682.310880] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 682.311097] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 682.359695] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.389231] env[61972]: DEBUG nova.policy [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '664c947be4534cd683227b0db816fef2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29baf785b51546969333d685f6912982', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 682.507151] env[61972]: DEBUG nova.compute.manager [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Received event network-vif-deleted-be72ce58-4074-47db-9ae6-d521a1242017 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 682.507151] env[61972]: DEBUG nova.compute.manager [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Received event network-changed-04b06b41-42b5-4d04-ba77-d7a56bbce454 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 682.507535] env[61972]: DEBUG nova.compute.manager [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Refreshing instance network info cache due to event network-changed-04b06b41-42b5-4d04-ba77-d7a56bbce454. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 682.507535] env[61972]: DEBUG oslo_concurrency.lockutils [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] Acquiring lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.744510] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0101be-e4fa-4308-a0c8-1db7bdaaf54b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.752032] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5e4b17-d657-4f8d-a285-b166bcbf317d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.786597] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76393211-81be-4d4e-b30e-fcd8d01e2cc4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.793145] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf416eba-6936-4ea3-97b2-bc0d4370df7e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.808264] env[61972]: DEBUG nova.compute.provider_tree [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.814595] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 682.821798] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.865322] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Releasing lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.865739] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 682.865926] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.866394] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a7ae43ee-4330-432f-bc42-9cc307f57443 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.876228] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7023e91b-64eb-4ead-8705-047072372ae0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.889939] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Successfully created port: 7fbdef94-9fb6-4093-8c58-de0936af0d85 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.901120] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5c036232-736c-4c34-a2b7-7de517b9cd50 could not be found. [ 682.901120] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 682.901120] env[61972]: INFO nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Took 0.03 seconds to destroy the instance on the hypervisor. [ 682.901120] env[61972]: DEBUG oslo.service.loopingcall [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.901120] env[61972]: DEBUG nova.compute.manager [-] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 682.901120] env[61972]: DEBUG nova.network.neutron [-] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 682.914593] env[61972]: DEBUG nova.network.neutron [-] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.116613] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.311455] env[61972]: DEBUG nova.scheduler.client.report [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 683.420175] env[61972]: DEBUG nova.network.neutron [-] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.619826] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Releasing lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.620386] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 683.620578] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 683.620890] env[61972]: DEBUG oslo_concurrency.lockutils [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] Acquired lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.621071] env[61972]: DEBUG nova.network.neutron [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Refreshing network info cache for port 04b06b41-42b5-4d04-ba77-d7a56bbce454 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 683.622263] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6447bced-1f0a-4824-b463-0d6c0aa378d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.632046] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75caadb-4092-4f5c-a310-af27c7537a8a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.654065] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eeb44b48-ed08-4f20-9498-b0eed38a00a2 could not be found. [ 683.654307] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.654462] env[61972]: INFO nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 683.654700] env[61972]: DEBUG oslo.service.loopingcall [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 683.654916] env[61972]: DEBUG nova.compute.manager [-] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 683.655017] env[61972]: DEBUG nova.network.neutron [-] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 683.675781] env[61972]: DEBUG nova.network.neutron [-] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.817916] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.016s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.818211] env[61972]: ERROR nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Traceback (most recent call last): [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self.driver.spawn(context, instance, image_meta, [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self._vmops.spawn(context, instance, image_meta, injected_files, [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] vm_ref = self.build_virtual_machine(instance, [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] vif_infos = vmwarevif.get_vif_info(self._session, [ 683.818211] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] for vif in network_info: [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File 
"/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] return self._sync_wrapper(fn, *args, **kwargs) [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self.wait() [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self[:] = self._gt.wait() [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] return self._exit_event.wait() [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] result = hub.switch() [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 683.818506] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] return self.greenlet.switch() [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] result = function(*args, **kwargs) [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] return func(*args, **kwargs) [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] raise e [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] nwinfo = self.network_api.allocate_for_instance( [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] created_port_ids = self._update_ports_for_instance( [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 
1414, in _update_ports_for_instance [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] with excutils.save_and_reraise_exception(): [ 683.818820] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] self.force_reraise() [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] raise self.value [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] updated_port = self._update_port( [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] _ensure_no_port_binding_failure(port) [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] raise exception.PortBindingFailed(port_id=port['id']) [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] nova.exception.PortBindingFailed: Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. [ 683.819140] env[61972]: ERROR nova.compute.manager [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] [ 683.819411] env[61972]: DEBUG nova.compute.utils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 683.821021] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Build of instance 3d32ec82-e623-4bbb-93c2-d39c934b4890 was re-scheduled: Binding failed for port 2cf67927-0ab5-4a4f-8ae0-29554256ae47, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 683.822018] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 683.822018] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.822018] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquired lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.822018] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 683.822939] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.700s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.824472] env[61972]: INFO nova.compute.claims [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.827606] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 683.873047] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 683.873047] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 683.873047] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.873329] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 683.873329] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.873329] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 683.873329] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 683.873329] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 683.873504] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 683.873504] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 683.873504] env[61972]: DEBUG nova.virt.hardware [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 683.874808] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b4a647-365c-4103-87f7-3f2cef225a12 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.882561] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2ef00e-15a6-47b0-a049-e9d8c5e381d1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.922271] env[61972]: INFO nova.compute.manager [-] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Took 1.02 seconds to deallocate network for instance. [ 683.924527] env[61972]: DEBUG nova.compute.claims [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 683.924707] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.144114] env[61972]: DEBUG nova.network.neutron [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.177753] env[61972]: DEBUG nova.network.neutron [-] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.310113] env[61972]: ERROR nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. 
[ 684.310113] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 684.310113] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 684.310113] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 684.310113] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.310113] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 684.310113] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.310113] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 684.310113] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.310113] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 684.310113] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.310113] env[61972]: ERROR nova.compute.manager raise self.value [ 684.310113] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.310113] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 684.310113] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.310113] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 684.310657] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.310657] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 684.310657] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. 
[ 684.310657] env[61972]: ERROR nova.compute.manager [ 684.310657] env[61972]: Traceback (most recent call last): [ 684.310657] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 684.310657] env[61972]: listener.cb(fileno) [ 684.310657] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.310657] env[61972]: result = function(*args, **kwargs) [ 684.310657] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 684.310657] env[61972]: return func(*args, **kwargs) [ 684.310657] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 684.310657] env[61972]: raise e [ 684.310657] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 684.310657] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 684.310657] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.310657] env[61972]: created_port_ids = self._update_ports_for_instance( [ 684.310657] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.310657] env[61972]: with excutils.save_and_reraise_exception(): [ 684.310657] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.310657] env[61972]: self.force_reraise() [ 684.310657] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.310657] env[61972]: raise self.value [ 684.310657] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.310657] env[61972]: updated_port = self._update_port( [ 684.310657] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.310657] env[61972]: _ensure_no_port_binding_failure(port) [ 684.310657] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.310657] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 684.311650] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. [ 684.311650] env[61972]: Removing descriptor: 21 [ 684.311650] env[61972]: ERROR nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. 
[ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Traceback (most recent call last): [ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] yield resources [ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self.driver.spawn(context, instance, image_meta, [ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.311650] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] vm_ref = self.build_virtual_machine(instance, [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] for vif in network_info: [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] return self._sync_wrapper(fn, *args, **kwargs) [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self.wait() [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self[:] = self._gt.wait() [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] return self._exit_event.wait() [ 684.312053] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 684.312441] env[61972]: ERROR 
nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] result = hub.switch() [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] return self.greenlet.switch() [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] result = function(*args, **kwargs) [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] return func(*args, **kwargs) [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] raise e [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] nwinfo = self.network_api.allocate_for_instance( [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.312441] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] created_port_ids = self._update_ports_for_instance( [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] with excutils.save_and_reraise_exception(): [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self.force_reraise() [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] raise self.value [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] updated_port = self._update_port( [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.316280] 
env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] _ensure_no_port_binding_failure(port) [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.316280] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] raise exception.PortBindingFailed(port_id=port['id']) [ 684.316835] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] nova.exception.PortBindingFailed: Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. [ 684.316835] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] [ 684.316835] env[61972]: INFO nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Terminating instance [ 684.337213] env[61972]: DEBUG nova.network.neutron [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.347706] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.429016] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.528539] env[61972]: DEBUG nova.compute.manager [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Received event network-changed-7fbdef94-9fb6-4093-8c58-de0936af0d85 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 684.528686] env[61972]: DEBUG nova.compute.manager [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Refreshing instance network info cache due to event network-changed-7fbdef94-9fb6-4093-8c58-de0936af0d85. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 684.528851] env[61972]: DEBUG oslo_concurrency.lockutils [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] Acquiring lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.528992] env[61972]: DEBUG oslo_concurrency.lockutils [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] Acquired lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.529449] env[61972]: DEBUG nova.network.neutron [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Refreshing network info cache for port 7fbdef94-9fb6-4093-8c58-de0936af0d85 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.680402] env[61972]: INFO nova.compute.manager [-] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Took 1.03 seconds to deallocate network for instance. [ 684.682747] env[61972]: DEBUG nova.compute.claims [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 684.682918] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.815605] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Acquiring lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.838308] env[61972]: DEBUG oslo_concurrency.lockutils [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] Releasing lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.838742] env[61972]: DEBUG nova.compute.manager [req-7d86307b-dd64-4086-92fa-5fd94ef8f98b req-255b1e01-36a3-4314-a58c-ff126525ac80 service nova] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Received event network-vif-deleted-04b06b41-42b5-4d04-ba77-d7a56bbce454 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 684.932088] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Releasing lock "refresh_cache-3d32ec82-e623-4bbb-93c2-d39c934b4890" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.932344] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc 
tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 684.932504] env[61972]: DEBUG nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 684.932666] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 684.948117] env[61972]: DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.046811] env[61972]: DEBUG nova.network.neutron [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.114440] env[61972]: DEBUG nova.network.neutron [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.167069] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2114346a-bd3b-46a5-9732-4e777282acd6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.174447] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9587e2-73d5-4159-9209-3a5c911cb52b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.203598] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461db00f-4e14-4a67-a6dc-dfc25bcf3069 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.210071] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a401e95-e564-4f71-b292-5c4a2615275e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.223835] env[61972]: DEBUG nova.compute.provider_tree [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.451181] env[61972]: 
DEBUG nova.network.neutron [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.617028] env[61972]: DEBUG oslo_concurrency.lockutils [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] Releasing lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.617028] env[61972]: DEBUG nova.compute.manager [req-17e07a7e-9878-4f43-8b62-19570a46e6b8 req-0318f76e-2504-4640-b438-394b4d30c183 service nova] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Received event network-vif-deleted-7fbdef94-9fb6-4093-8c58-de0936af0d85 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 685.617319] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Acquired lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.617490] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.727664] env[61972]: DEBUG nova.scheduler.client.report [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 685.953579] env[61972]: INFO nova.compute.manager [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 3d32ec82-e623-4bbb-93c2-d39c934b4890] Took 1.02 seconds to deallocate network for instance. [ 686.133643] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.197453] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.237185] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.237654] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 686.240026] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.126s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.241392] env[61972]: INFO nova.compute.claims [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.699745] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Releasing lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.700172] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 686.700370] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 686.701032] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e274206-aba5-4d8a-afd2-01e2670a087b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.709106] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4293376-2e7c-4a3f-8a32-3337c9ec427f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.732212] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 503419e5-ae32-49d4-bc41-838fb3c9437e could not be found. [ 686.732426] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 686.732615] env[61972]: INFO nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 686.732912] env[61972]: DEBUG oslo.service.loopingcall [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.733157] env[61972]: DEBUG nova.compute.manager [-] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 686.733257] env[61972]: DEBUG nova.network.neutron [-] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 686.745129] env[61972]: DEBUG nova.compute.utils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 686.748355] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 686.748494] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 686.750746] env[61972]: DEBUG nova.network.neutron [-] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.800836] env[61972]: DEBUG nova.policy [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb6cad3d802a4037871454d8e7472b32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8ceeb806fd54401905315e243f5f53c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 686.987139] env[61972]: INFO nova.scheduler.client.report [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Deleted allocations for instance 3d32ec82-e623-4bbb-93c2-d39c934b4890 [ 687.170075] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Successfully created port: 5c505086-4186-4f14-a033-2f16f1d406b8 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.249149] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 
5300907c-d589-4ccf-a9c5-4a6bd819783b] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 687.259074] env[61972]: DEBUG nova.network.neutron [-] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.496534] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0cbcdc3e-6a7f-4730-8961-cd9b33be31fc tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "3d32ec82-e623-4bbb-93c2-d39c934b4890" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.433s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.642848] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da37431c-f4e6-4f88-a3a7-732bee58efdc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.652091] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ecb8e5-c19b-4777-a911-a50f824d81b4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.682726] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad37aee4-244d-47ed-98ce-943845d2679b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.690476] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f82fd86-b6f6-4f38-850f-ab51b2c6268b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.706052] env[61972]: DEBUG nova.compute.provider_tree [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.765673] env[61972]: INFO nova.compute.manager [-] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Took 1.03 seconds to deallocate network for instance. [ 687.770647] env[61972]: DEBUG nova.compute.claims [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 687.770853] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.000597] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 688.211336] env[61972]: DEBUG nova.scheduler.client.report [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 688.222740] env[61972]: DEBUG nova.compute.manager [req-22993eed-2520-40dc-aa35-5384d5bfae58 req-8c92d4dd-419b-49b8-bbf4-ee8dd8351f4e service nova] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Received event network-changed-5c505086-4186-4f14-a033-2f16f1d406b8 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 688.222934] env[61972]: DEBUG nova.compute.manager [req-22993eed-2520-40dc-aa35-5384d5bfae58 req-8c92d4dd-419b-49b8-bbf4-ee8dd8351f4e service nova] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Refreshing instance network info cache due to event network-changed-5c505086-4186-4f14-a033-2f16f1d406b8. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 688.223160] env[61972]: DEBUG oslo_concurrency.lockutils [req-22993eed-2520-40dc-aa35-5384d5bfae58 req-8c92d4dd-419b-49b8-bbf4-ee8dd8351f4e service nova] Acquiring lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.223298] env[61972]: DEBUG oslo_concurrency.lockutils [req-22993eed-2520-40dc-aa35-5384d5bfae58 req-8c92d4dd-419b-49b8-bbf4-ee8dd8351f4e service nova] Acquired lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.223452] env[61972]: DEBUG nova.network.neutron [req-22993eed-2520-40dc-aa35-5384d5bfae58 req-8c92d4dd-419b-49b8-bbf4-ee8dd8351f4e service nova] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Refreshing network info cache for port 5c505086-4186-4f14-a033-2f16f1d406b8 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 688.261396] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 688.288633] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 688.288895] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 688.289150] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 688.289345] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 688.289536] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 688.289711] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 688.289950] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 688.290578] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 688.290578] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 688.290578] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 688.290887] env[61972]: DEBUG nova.virt.hardware [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 688.291643] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb109289-19fd-41e1-bfc4-c63d1adc6a13 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.300563] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24940041-7ccf-4e05-9736-6ddd9ad9f3ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.354038] env[61972]: ERROR nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. 
[ 688.354038] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 688.354038] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 688.354038] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 688.354038] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 688.354038] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 688.354038] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 688.354038] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 688.354038] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 688.354038] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 688.354038] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 688.354038] env[61972]: ERROR nova.compute.manager raise self.value [ 688.354038] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 688.354038] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 688.354038] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 688.354038] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 688.354551] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 688.354551] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 688.354551] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. 
[ 688.354551] env[61972]: ERROR nova.compute.manager [ 688.354551] env[61972]: Traceback (most recent call last): [ 688.354551] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 688.354551] env[61972]: listener.cb(fileno) [ 688.354551] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 688.354551] env[61972]: result = function(*args, **kwargs) [ 688.354551] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 688.354551] env[61972]: return func(*args, **kwargs) [ 688.354551] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 688.354551] env[61972]: raise e [ 688.354551] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 688.354551] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 688.354551] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 688.354551] env[61972]: created_port_ids = self._update_ports_for_instance( [ 688.354551] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 688.354551] env[61972]: with excutils.save_and_reraise_exception(): [ 688.354551] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 688.354551] env[61972]: self.force_reraise() [ 688.354551] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 688.354551] env[61972]: raise self.value [ 688.354551] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 688.354551] env[61972]: updated_port = self._update_port( [ 688.354551] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 688.354551] env[61972]: _ensure_no_port_binding_failure(port) [ 688.354551] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 688.354551] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 688.355301] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. [ 688.355301] env[61972]: Removing descriptor: 19 [ 688.355301] env[61972]: ERROR nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. 
[ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Traceback (most recent call last): [ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] yield resources [ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self.driver.spawn(context, instance, image_meta, [ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 688.355301] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] vm_ref = self.build_virtual_machine(instance, [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] vif_infos = vmwarevif.get_vif_info(self._session, [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] for vif in network_info: [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] return self._sync_wrapper(fn, *args, **kwargs) [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self.wait() [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self[:] = self._gt.wait() [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] return self._exit_event.wait() [ 688.355676] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 688.356079] env[61972]: ERROR 
nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] result = hub.switch() [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] return self.greenlet.switch() [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] result = function(*args, **kwargs) [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] return func(*args, **kwargs) [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] raise e [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] nwinfo = self.network_api.allocate_for_instance( [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 688.356079] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] created_port_ids = self._update_ports_for_instance( [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] with excutils.save_and_reraise_exception(): [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self.force_reraise() [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] raise self.value [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] updated_port = self._update_port( [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 688.356443] 
env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] _ensure_no_port_binding_failure(port) [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 688.356443] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] raise exception.PortBindingFailed(port_id=port['id']) [ 688.356835] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] nova.exception.PortBindingFailed: Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. [ 688.356835] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] [ 688.356835] env[61972]: INFO nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Terminating instance [ 688.523308] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.716860] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.717070] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 688.719511] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 32.777s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.740463] env[61972]: DEBUG nova.network.neutron [req-22993eed-2520-40dc-aa35-5384d5bfae58 req-8c92d4dd-419b-49b8-bbf4-ee8dd8351f4e service nova] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.844340] env[61972]: DEBUG nova.network.neutron [req-22993eed-2520-40dc-aa35-5384d5bfae58 req-8c92d4dd-419b-49b8-bbf4-ee8dd8351f4e service nova] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.859244] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Acquiring lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.224426] env[61972]: DEBUG nova.compute.utils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 689.228830] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 689.229051] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.295203] env[61972]: DEBUG nova.policy [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6b7c5b037a54c8cbd151ad0f1875f37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbbaa322b60942819cfb147b5201daf4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 689.347015] env[61972]: DEBUG oslo_concurrency.lockutils [req-22993eed-2520-40dc-aa35-5384d5bfae58 req-8c92d4dd-419b-49b8-bbf4-ee8dd8351f4e service nova] Releasing lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.347549] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Acquired lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.347755] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Building network 
info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.393026] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "49cd5798-1f76-4690-bea7-cebd98a84f5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.393026] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "49cd5798-1f76-4690-bea7-cebd98a84f5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.631192] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdcadd5c-86bd-4903-b816-95888befddb2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.641229] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550eaada-ff5f-471d-b369-19e6894b6e7f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.672453] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a04d40e-a4be-4057-ab52-e75258e1a2cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.679374] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Successfully created port: 11325322-ae52-4fd2-b017-297605a61bcb {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.682141] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7cb5ce-d046-438f-b7f1-2129b054155b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.695239] env[61972]: DEBUG nova.compute.provider_tree [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.728990] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 689.886984] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.047389] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.198029] env[61972]: DEBUG nova.scheduler.client.report [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 690.309591] env[61972]: DEBUG nova.compute.manager [req-c865aad9-bfe8-40fe-9a89-37f7e9b56152 req-8b6d410e-cf70-448b-894b-12d07e3af0a3 service nova] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Received event network-vif-deleted-5c505086-4186-4f14-a033-2f16f1d406b8 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 690.549856] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Releasing lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.550325] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 690.550523] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 690.550822] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a1c6dd7-8ce7-44c1-9399-b090b01ba7c7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.559347] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aed0d05-a1fe-442a-bd7a-36f6d09fff1e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.595367] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5300907c-d589-4ccf-a9c5-4a6bd819783b could not be found. [ 690.595629] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.595763] env[61972]: INFO nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 690.596035] env[61972]: DEBUG oslo.service.loopingcall [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.596243] env[61972]: DEBUG nova.compute.manager [-] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 690.596519] env[61972]: DEBUG nova.network.neutron [-] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 690.616427] env[61972]: DEBUG nova.network.neutron [-] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.703292] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.984s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.703803] env[61972]: ERROR nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Traceback (most recent call last): [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self.driver.spawn(context, instance, image_meta, [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] vm_ref = self.build_virtual_machine(instance, [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] vif_infos = vmwarevif.get_vif_info(self._session, [ 690.703803] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] for vif in network_info: [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] return self._sync_wrapper(fn, *args, **kwargs) [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self.wait() [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 690.704127] env[61972]: ERROR 
nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self[:] = self._gt.wait() [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] return self._exit_event.wait() [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] result = hub.switch() [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 690.704127] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] return self.greenlet.switch() [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] result = function(*args, **kwargs) [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] return func(*args, **kwargs) [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] raise e [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] nwinfo = self.network_api.allocate_for_instance( [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] created_port_ids = self._update_ports_for_instance( [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] with excutils.save_and_reraise_exception(): [ 690.704451] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] self.force_reraise() [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] raise self.value [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] updated_port = self._update_port( [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] _ensure_no_port_binding_failure(port) [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] raise exception.PortBindingFailed(port_id=port['id']) [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] nova.exception.PortBindingFailed: Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. [ 690.704824] env[61972]: ERROR nova.compute.manager [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] [ 690.705111] env[61972]: DEBUG nova.compute.utils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 690.707382] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.943s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.709206] env[61972]: INFO nova.compute.claims [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.712086] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Build of instance 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4 was re-scheduled: Binding failed for port 9ae03200-5487-4d70-abc9-a634277bfddc, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 690.712523] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 690.712743] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquiring lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.713071] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Acquired lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.713071] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 690.738383] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 690.766168] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 690.766479] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 690.766559] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.766734] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 690.766875] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 690.767031] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 690.767256] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 690.767413] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 690.767645] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] 
Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 690.767857] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 690.768040] env[61972]: DEBUG nova.virt.hardware [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 690.768874] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3865219-039d-4a44-97c4-b22d99ed4c9b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.776671] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38220c7a-08f8-4038-b365-8ccfb6fe54c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.927445] env[61972]: ERROR nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. [ 690.927445] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 690.927445] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 690.927445] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 690.927445] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.927445] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 690.927445] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.927445] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 690.927445] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.927445] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 690.927445] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.927445] env[61972]: ERROR nova.compute.manager raise self.value [ 690.927445] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.927445] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 690.927445] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.927445] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 690.927945] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 690.927945] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 690.927945] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. [ 690.927945] env[61972]: ERROR nova.compute.manager [ 690.927945] env[61972]: Traceback (most recent call last): [ 690.927945] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 690.927945] env[61972]: listener.cb(fileno) [ 690.927945] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.927945] env[61972]: result = function(*args, **kwargs) [ 690.927945] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 690.927945] env[61972]: return func(*args, **kwargs) [ 690.927945] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 690.927945] env[61972]: raise e [ 690.927945] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 690.927945] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 690.927945] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.927945] env[61972]: created_port_ids = self._update_ports_for_instance( [ 690.927945] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.927945] env[61972]: with excutils.save_and_reraise_exception(): [ 690.927945] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.927945] env[61972]: self.force_reraise() [ 690.927945] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.927945] env[61972]: raise self.value [ 690.927945] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.927945] env[61972]: updated_port = self._update_port( [ 690.927945] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.927945] env[61972]: _ensure_no_port_binding_failure(port) [ 690.927945] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.927945] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 690.928794] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. [ 690.928794] env[61972]: Removing descriptor: 19 [ 690.928794] env[61972]: ERROR nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. 
[ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Traceback (most recent call last): [ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] yield resources [ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self.driver.spawn(context, instance, image_meta, [ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self._vmops.spawn(context, instance, image_meta, injected_files, [ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 690.928794] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] vm_ref = self.build_virtual_machine(instance, [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] vif_infos = vmwarevif.get_vif_info(self._session, [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] for vif in network_info: [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] return self._sync_wrapper(fn, *args, **kwargs) [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self.wait() [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self[:] = self._gt.wait() [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] return self._exit_event.wait() [ 690.929234] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 690.929630] env[61972]: ERROR 
nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] result = hub.switch() [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] return self.greenlet.switch() [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] result = function(*args, **kwargs) [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] return func(*args, **kwargs) [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] raise e [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] nwinfo = self.network_api.allocate_for_instance( [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.929630] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] created_port_ids = self._update_ports_for_instance( [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] with excutils.save_and_reraise_exception(): [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self.force_reraise() [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] raise self.value [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] updated_port = self._update_port( [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.930085] 
env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] _ensure_no_port_binding_failure(port) [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.930085] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] raise exception.PortBindingFailed(port_id=port['id']) [ 690.931127] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] nova.exception.PortBindingFailed: Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. [ 690.931127] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] [ 690.931127] env[61972]: INFO nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Terminating instance [ 691.118975] env[61972]: DEBUG nova.network.neutron [-] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.238382] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.373192] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.433737] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.433926] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.434131] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.621638] env[61972]: INFO nova.compute.manager [-] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Took 1.03 seconds to deallocate network for instance. 
[ 691.624466] env[61972]: DEBUG nova.compute.claims [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 691.624670] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.876222] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Releasing lock "refresh_cache-0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.876461] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 691.876640] env[61972]: DEBUG nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 691.876807] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 691.893470] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.954876] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.071222] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.089096] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7378d73-f217-444f-bde1-c8282d7cbffe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.096891] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1513673c-2e58-4ba6-8121-27fb3f057c47 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.126831] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b32517d-0ca0-4e4e-8249-13dff204abc2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.133763] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10431cc-d3f2-4cc7-9800-aa7ec83d4202 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.146295] env[61972]: DEBUG nova.compute.provider_tree [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.358925] env[61972]: DEBUG nova.compute.manager [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Received event network-changed-11325322-ae52-4fd2-b017-297605a61bcb {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 692.359032] env[61972]: DEBUG nova.compute.manager [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Refreshing instance network info cache due to event network-changed-11325322-ae52-4fd2-b017-297605a61bcb. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 692.359224] env[61972]: DEBUG oslo_concurrency.lockutils [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] Acquiring lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.396508] env[61972]: DEBUG nova.network.neutron [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.574047] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.574490] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 692.574683] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 692.574990] env[61972]: DEBUG oslo_concurrency.lockutils [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] Acquired lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.575178] env[61972]: DEBUG nova.network.neutron [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Refreshing network info cache for port 11325322-ae52-4fd2-b017-297605a61bcb {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 692.576278] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-144dd4aa-48a7-4a1a-94a2-d513748c23f4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.587052] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abaf5d1f-74bf-415f-bbb3-97a7e65ca368 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.608883] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 92b06621-cdaa-4723-b339-c0f698897d24 could not be found. 
[ 692.609128] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 692.609311] env[61972]: INFO nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Took 0.03 seconds to destroy the instance on the hypervisor. [ 692.609545] env[61972]: DEBUG oslo.service.loopingcall [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.609762] env[61972]: DEBUG nova.compute.manager [-] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 692.609912] env[61972]: DEBUG nova.network.neutron [-] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 692.631072] env[61972]: DEBUG nova.network.neutron [-] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.649183] env[61972]: DEBUG nova.scheduler.client.report [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 692.899381] env[61972]: INFO nova.compute.manager [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] [instance: 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4] Took 1.02 seconds to deallocate network for instance. [ 693.098501] env[61972]: DEBUG nova.network.neutron [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.133723] env[61972]: DEBUG nova.network.neutron [-] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.154449] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.154972] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 693.157467] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.554s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.197375] env[61972]: DEBUG nova.network.neutron [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.637404] env[61972]: INFO nova.compute.manager [-] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Took 1.03 seconds to deallocate network for instance. [ 693.639713] env[61972]: DEBUG nova.compute.claims [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 693.639892] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.662118] env[61972]: DEBUG nova.compute.utils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 693.665932] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 693.666110] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 693.700443] env[61972]: DEBUG oslo_concurrency.lockutils [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] Releasing lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.700687] env[61972]: DEBUG nova.compute.manager [req-388cdeae-fae4-4b86-b497-df0bc8c3bbf7 req-1d7c00c8-12ee-42d7-a493-ac511f4b4f82 service nova] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Received event network-vif-deleted-11325322-ae52-4fd2-b017-297605a61bcb {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 693.735507] env[61972]: DEBUG nova.policy [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34838d72bc5c40e4861aeb1bc2346e0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e6f816e56de421ba4a2d7de91a6550c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 693.932150] env[61972]: INFO nova.scheduler.client.report [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Deleted allocations for instance 0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4 [ 694.068385] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5c23be-1c86-4df5-af92-aaaee884e510 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.078637] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58672732-f9cc-4bbc-bc89-69271c69c11b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.110572] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a23f32-ea2f-4b10-a658-d2301ce10dcd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.117659] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b60c5f1-b9ca-4947-a028-d2a8044f67db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.131932] env[61972]: DEBUG nova.compute.provider_tree [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.133950] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Successfully created port: c9cc0c9c-9404-44be-84c8-6c8ac8c7b890 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.169086] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 694.447016] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37afbf88-de2f-4564-825b-7490fbbb2712 tempest-ServersAdminTestJSON-1940994138 tempest-ServersAdminTestJSON-1940994138-project-member] Lock "0ac2f6eb-8d2e-4ae2-9819-d685a5bfafc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.693s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.641033] env[61972]: DEBUG nova.scheduler.client.report [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 694.949835] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 695.146593] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.989s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.147258] env[61972]: ERROR nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. 
[ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Traceback (most recent call last): [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self.driver.spawn(context, instance, image_meta, [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] vm_ref = self.build_virtual_machine(instance, [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] vif_infos = vmwarevif.get_vif_info(self._session, [ 695.147258] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] for vif in network_info: [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] return self._sync_wrapper(fn, *args, **kwargs) [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self.wait() [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self[:] = self._gt.wait() [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] return self._exit_event.wait() [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] result = hub.switch() [ 695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
695.147585] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] return self.greenlet.switch() [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] result = function(*args, **kwargs) [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] return func(*args, **kwargs) [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] raise e [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] nwinfo = self.network_api.allocate_for_instance( [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] created_port_ids = self._update_ports_for_instance( [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] with excutils.save_and_reraise_exception(): [ 695.147903] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] self.force_reraise() [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] raise self.value [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] updated_port = self._update_port( [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] _ensure_no_port_binding_failure(port) [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] raise exception.PortBindingFailed(port_id=port['id']) [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] nova.exception.PortBindingFailed: Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. [ 695.148236] env[61972]: ERROR nova.compute.manager [instance: 5aba271f-72bb-4847-8c87-18adda584a74] [ 695.148579] env[61972]: DEBUG nova.compute.utils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 695.150885] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.838s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.154177] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Build of instance 5aba271f-72bb-4847-8c87-18adda584a74 was re-scheduled: Binding failed for port dc2c0cbb-b7e3-48f2-ad9d-1bcb5b0da665, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 695.154608] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 695.154825] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Acquiring lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.154999] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Acquired lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.155226] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.178933] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 695.223156] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 695.223430] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 695.223582] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 695.223756] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 695.223894] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 695.224107] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 695.225987] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 695.225987] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 695.225987] env[61972]: DEBUG nova.virt.hardware [None 
req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 695.225987] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 695.225987] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 695.226218] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7680f9-9eb8-41bf-a1b0-a3f971c5f22d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.235743] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffde5beb-4669-4a8b-a784-6cfd0b9c06ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.321886] env[61972]: DEBUG nova.compute.manager [req-92d93a73-1d40-4b66-8db7-22dd96aaeac1 req-8ed9265e-5026-4d32-b642-e23727fd9ccb service nova] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Received event network-changed-c9cc0c9c-9404-44be-84c8-6c8ac8c7b890 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 695.321961] env[61972]: DEBUG nova.compute.manager [req-92d93a73-1d40-4b66-8db7-22dd96aaeac1 req-8ed9265e-5026-4d32-b642-e23727fd9ccb service nova] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Refreshing instance network info cache due to event network-changed-c9cc0c9c-9404-44be-84c8-6c8ac8c7b890. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 695.322179] env[61972]: DEBUG oslo_concurrency.lockutils [req-92d93a73-1d40-4b66-8db7-22dd96aaeac1 req-8ed9265e-5026-4d32-b642-e23727fd9ccb service nova] Acquiring lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.322324] env[61972]: DEBUG oslo_concurrency.lockutils [req-92d93a73-1d40-4b66-8db7-22dd96aaeac1 req-8ed9265e-5026-4d32-b642-e23727fd9ccb service nova] Acquired lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.322482] env[61972]: DEBUG nova.network.neutron [req-92d93a73-1d40-4b66-8db7-22dd96aaeac1 req-8ed9265e-5026-4d32-b642-e23727fd9ccb service nova] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Refreshing network info cache for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 695.476115] env[61972]: ERROR nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. [ 695.476115] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 695.476115] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.476115] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 695.476115] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.476115] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 695.476115] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.476115] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 695.476115] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.476115] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 695.476115] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.476115] env[61972]: ERROR nova.compute.manager raise self.value [ 695.476115] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.476115] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 695.476115] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.476115] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 695.478104] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.478104] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 695.478104] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. [ 695.478104] env[61972]: ERROR nova.compute.manager [ 695.478104] env[61972]: Traceback (most recent call last): [ 695.478104] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 695.478104] env[61972]: listener.cb(fileno) [ 695.478104] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.478104] env[61972]: result = function(*args, **kwargs) [ 695.478104] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 695.478104] env[61972]: return func(*args, **kwargs) [ 695.478104] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 695.478104] env[61972]: raise e [ 695.478104] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.478104] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 695.478104] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.478104] env[61972]: created_port_ids = self._update_ports_for_instance( [ 695.478104] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.478104] env[61972]: with excutils.save_and_reraise_exception(): [ 695.478104] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.478104] env[61972]: self.force_reraise() [ 695.478104] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.478104] env[61972]: raise self.value [ 695.478104] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.478104] env[61972]: updated_port = self._update_port( [ 695.478104] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.478104] env[61972]: _ensure_no_port_binding_failure(port) [ 695.478104] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.478104] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 695.478973] env[61972]: nova.exception.PortBindingFailed: Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. [ 695.478973] env[61972]: Removing descriptor: 19 [ 695.478973] env[61972]: ERROR nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. 
[ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Traceback (most recent call last): [ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] yield resources [ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self.driver.spawn(context, instance, image_meta, [ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 695.478973] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] vm_ref = self.build_virtual_machine(instance, [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] vif_infos = vmwarevif.get_vif_info(self._session, [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] for vif in network_info: [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] return self._sync_wrapper(fn, *args, **kwargs) [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self.wait() [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self[:] = self._gt.wait() [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] return self._exit_event.wait() [ 695.479331] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 695.479653] env[61972]: ERROR 
nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] result = hub.switch() [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] return self.greenlet.switch() [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] result = function(*args, **kwargs) [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] return func(*args, **kwargs) [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] raise e [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] nwinfo = self.network_api.allocate_for_instance( [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.479653] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] created_port_ids = self._update_ports_for_instance( [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] with excutils.save_and_reraise_exception(): [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self.force_reraise() [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] raise self.value [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] updated_port = self._update_port( [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.479995] 
env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] _ensure_no_port_binding_failure(port) [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.479995] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] raise exception.PortBindingFailed(port_id=port['id']) [ 695.481622] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] nova.exception.PortBindingFailed: Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. [ 695.481622] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] [ 695.481622] env[61972]: INFO nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Terminating instance [ 695.481622] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.680308] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.808198] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.844136] env[61972]: DEBUG nova.network.neutron [req-92d93a73-1d40-4b66-8db7-22dd96aaeac1 req-8ed9265e-5026-4d32-b642-e23727fd9ccb service nova] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.944984] env[61972]: DEBUG nova.network.neutron [req-92d93a73-1d40-4b66-8db7-22dd96aaeac1 req-8ed9265e-5026-4d32-b642-e23727fd9ccb service nova] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.985977] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.099937] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844f311c-e2f0-4980-b640-6ec1534d1dce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.109779] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72cadad-23da-4fbf-a5f9-3dde1763baf5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.141929] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab1971a-ef07-411a-9aff-265c44ddfb6d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.149770] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0660b1e3-b73a-445d-b3d8-edc775b90d04 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.162829] env[61972]: DEBUG nova.compute.provider_tree [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.311289] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Releasing lock "refresh_cache-5aba271f-72bb-4847-8c87-18adda584a74" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.311537] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 696.311716] env[61972]: DEBUG nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 696.311880] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.341029] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.448940] env[61972]: DEBUG oslo_concurrency.lockutils [req-92d93a73-1d40-4b66-8db7-22dd96aaeac1 req-8ed9265e-5026-4d32-b642-e23727fd9ccb service nova] Releasing lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.449373] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.449552] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.669612] env[61972]: DEBUG nova.scheduler.client.report [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 696.843516] env[61972]: DEBUG nova.network.neutron [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.970828] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 
tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.105775] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.174815] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.024s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.175471] env[61972]: ERROR nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Traceback (most recent call last): [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self.driver.spawn(context, instance, image_meta, [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] vm_ref = self.build_virtual_machine(instance, [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.175471] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] for vif in network_info: [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] return self._sync_wrapper(fn, *args, 
**kwargs) [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self.wait() [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self[:] = self._gt.wait() [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] return self._exit_event.wait() [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] result = hub.switch() [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 697.175830] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] return self.greenlet.switch() [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] result = function(*args, **kwargs) [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] return func(*args, **kwargs) [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] raise e [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] nwinfo = self.network_api.allocate_for_instance( [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] created_port_ids = self._update_ports_for_instance( [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.176200] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] with excutils.save_and_reraise_exception(): [ 697.176200] 
env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] self.force_reraise() [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] raise self.value [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] updated_port = self._update_port( [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] _ensure_no_port_binding_failure(port) [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] raise exception.PortBindingFailed(port_id=port['id']) [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] nova.exception.PortBindingFailed: Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. [ 697.176520] env[61972]: ERROR nova.compute.manager [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] [ 697.176790] env[61972]: DEBUG nova.compute.utils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.177509] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.253s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.180392] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Build of instance 9bfde590-fe6c-404d-88ad-9da1763c0870 was re-scheduled: Binding failed for port 753f3805-c146-4136-9b57-ec2cd4660667, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 697.180804] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 697.181032] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Acquiring lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.181187] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Acquired lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.181334] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.349402] env[61972]: INFO nova.compute.manager [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] [instance: 5aba271f-72bb-4847-8c87-18adda584a74] Took 1.03 seconds to deallocate network for instance. [ 697.394978] env[61972]: DEBUG nova.compute.manager [req-2bc13b65-aec6-463f-9292-5320070662b9 req-b9e7aa58-26f9-4871-89b3-a03429fa6fd1 service nova] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Received event network-vif-deleted-c9cc0c9c-9404-44be-84c8-6c8ac8c7b890 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 697.608518] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.608969] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 697.609181] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 697.609496] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5e281b9-452a-400d-86b4-8a29af24bf6d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.618283] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0be94c-98b8-4098-aafb-bcacd70d9605 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.640462] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 50e4d9d3-a17b-4bb2-9816-bb44f269370e could not be found. [ 697.640585] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.640741] env[61972]: INFO nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 697.640980] env[61972]: DEBUG oslo.service.loopingcall [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 697.641691] env[61972]: DEBUG nova.compute.manager [-] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 697.641691] env[61972]: DEBUG nova.network.neutron [-] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.658819] env[61972]: DEBUG nova.network.neutron [-] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.702146] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.797057] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.113103] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e33005d-3cca-4531-9d6f-1128d168dbed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.118699] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d39f09d-7ea2-4262-8013-ea2ae9cf26ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.150276] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c644a51-c4e6-4655-b1ed-4418282ad28d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.157483] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e45de94-afc0-4463-8828-4bc3afa921d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.169966] env[61972]: DEBUG nova.network.neutron [-] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.171624] env[61972]: DEBUG nova.compute.provider_tree [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.301478] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Releasing lock "refresh_cache-9bfde590-fe6c-404d-88ad-9da1763c0870" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.301747] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 698.301938] env[61972]: DEBUG nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 698.302170] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 698.331348] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.384335] env[61972]: INFO nova.scheduler.client.report [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Deleted allocations for instance 5aba271f-72bb-4847-8c87-18adda584a74 [ 698.675624] env[61972]: DEBUG nova.scheduler.client.report [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 698.678854] env[61972]: INFO nova.compute.manager [-] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Took 1.04 seconds to deallocate network for instance. 
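[editor's note] The PortBindingFailed errors recorded throughout this section (for example, the traceback that follows) all funnel through the same helper named in those tracebacks, _ensure_no_port_binding_failure in nova/network/neutron.py, which inspects the port returned by Neutron and raises nova.exception.PortBindingFailed when the binding failed. The snippet below is only a minimal, self-contained sketch of that pattern for readers following the log: the exception class is a local stand-in rather than the real nova.exception.PortBindingFailed, and the 'binding:vif_type' == 'binding_failed' condition is an assumption inferred from the behaviour visible here, not a quote of the Nova source.

# Sketch only: mirrors the check seen in the tracebacks in this log
# (nova/network/neutron.py: _ensure_no_port_binding_failure).

class PortBindingFailed(Exception):
    # Standalone stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Assumption: Neutron marks a failed binding by setting the port's
    # 'binding:vif_type' attribute to 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example: a port whose binding failed, using an ID taken from the log.
port = {'id': 'be72ce58-4074-47db-9ae6-d521a1242017',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)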
[ 698.680943] env[61972]: DEBUG nova.compute.claims [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 698.681133] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.833722] env[61972]: DEBUG nova.network.neutron [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.891835] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a8266c3f-7de3-4246-904e-c39c8676b8d8 tempest-ServerActionsTestJSON-1375397780 tempest-ServerActionsTestJSON-1375397780-project-member] Lock "5aba271f-72bb-4847-8c87-18adda584a74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.932s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.184033] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.006s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.185304] env[61972]: ERROR nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. 
[ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Traceback (most recent call last): [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self.driver.spawn(context, instance, image_meta, [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self._vmops.spawn(context, instance, image_meta, injected_files, [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] vm_ref = self.build_virtual_machine(instance, [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] vif_infos = vmwarevif.get_vif_info(self._session, [ 699.185304] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] for vif in network_info: [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] return self._sync_wrapper(fn, *args, **kwargs) [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self.wait() [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self[:] = self._gt.wait() [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] return self._exit_event.wait() [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] result = hub.switch() [ 699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
699.185713] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] return self.greenlet.switch() [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] result = function(*args, **kwargs) [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] return func(*args, **kwargs) [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] raise e [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] nwinfo = self.network_api.allocate_for_instance( [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] created_port_ids = self._update_ports_for_instance( [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] with excutils.save_and_reraise_exception(): [ 699.186199] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] self.force_reraise() [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] raise self.value [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] updated_port = self._update_port( [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] _ensure_no_port_binding_failure(port) [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] raise exception.PortBindingFailed(port_id=port['id']) [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] nova.exception.PortBindingFailed: Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. [ 699.187083] env[61972]: ERROR nova.compute.manager [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] [ 699.187427] env[61972]: DEBUG nova.compute.utils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 699.187427] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.504s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.189396] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Build of instance 5c036232-736c-4c34-a2b7-7de517b9cd50 was re-scheduled: Binding failed for port be72ce58-4074-47db-9ae6-d521a1242017, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 699.189818] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 699.190055] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Acquiring lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.190204] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Acquired lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.190358] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.336457] env[61972]: INFO nova.compute.manager [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] [instance: 9bfde590-fe6c-404d-88ad-9da1763c0870] Took 1.03 seconds to deallocate network for instance. [ 699.394757] env[61972]: DEBUG nova.compute.manager [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 699.727909] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.829614] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.920751] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.154239] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f9f9f4-62f3-44e5-b0d7-aa853135e069 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.164629] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1364909-8f3a-4c02-abe5-b6561f1ca583 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.194850] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e359eeec-21f2-4c5d-84cc-faf6bf593011 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.203382] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f27bb23-3d73-4714-9cbc-bb054ed85353 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.217348] env[61972]: DEBUG nova.compute.provider_tree [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.332815] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Releasing lock "refresh_cache-5c036232-736c-4c34-a2b7-7de517b9cd50" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.333160] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 700.333367] env[61972]: DEBUG nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 700.333533] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.354881] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.377974] env[61972]: INFO nova.scheduler.client.report [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Deleted allocations for instance 9bfde590-fe6c-404d-88ad-9da1763c0870 [ 700.721317] env[61972]: DEBUG nova.scheduler.client.report [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 700.861588] env[61972]: DEBUG nova.network.neutron [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.890561] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8723b63b-93d3-4cbc-850b-df0ffee755d1 tempest-ServersTestJSON-679088604 tempest-ServersTestJSON-679088604-project-member] Lock "9bfde590-fe6c-404d-88ad-9da1763c0870" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.987s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.226392] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.040s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.227456] env[61972]: ERROR nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 
tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Traceback (most recent call last): [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self.driver.spawn(context, instance, image_meta, [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] vm_ref = self.build_virtual_machine(instance, [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] vif_infos = vmwarevif.get_vif_info(self._session, [ 701.227456] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] for vif in network_info: [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] return self._sync_wrapper(fn, *args, **kwargs) [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self.wait() [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self[:] = self._gt.wait() [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] return self._exit_event.wait() [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 701.227805] 
env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] result = hub.switch() [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 701.227805] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] return self.greenlet.switch() [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] result = function(*args, **kwargs) [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] return func(*args, **kwargs) [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] raise e [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] nwinfo = self.network_api.allocate_for_instance( [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] created_port_ids = self._update_ports_for_instance( [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] with excutils.save_and_reraise_exception(): [ 701.228185] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] self.force_reraise() [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] raise self.value [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] updated_port = self._update_port( [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in 
_update_port [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] _ensure_no_port_binding_failure(port) [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] raise exception.PortBindingFailed(port_id=port['id']) [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] nova.exception.PortBindingFailed: Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. [ 701.228533] env[61972]: ERROR nova.compute.manager [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] [ 701.228820] env[61972]: DEBUG nova.compute.utils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 701.232797] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Build of instance eeb44b48-ed08-4f20-9498-b0eed38a00a2 was re-scheduled: Binding failed for port 04b06b41-42b5-4d04-ba77-d7a56bbce454, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 701.233276] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 701.233459] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Acquiring lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.233602] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Acquired lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.233754] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 701.237625] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.465s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.367243] env[61972]: INFO nova.compute.manager [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] [instance: 5c036232-736c-4c34-a2b7-7de517b9cd50] Took 1.03 seconds to deallocate network for instance. [ 701.394851] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 701.762166] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.922375] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.962634] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.244025] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b403bb3-da1c-44ce-b75e-b9319db65eae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.252256] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47ca007-0269-4660-a6bb-d26ae1d27e59 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.287740] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6717dad-9c91-46ff-96e8-4b1e21df162e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.300580] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2a0ffe-e4da-497b-b407-acb8793c4cbc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.316223] env[61972]: DEBUG nova.compute.provider_tree [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.414595] env[61972]: INFO nova.scheduler.client.report 
[None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Deleted allocations for instance 5c036232-736c-4c34-a2b7-7de517b9cd50 [ 702.466791] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Releasing lock "refresh_cache-eeb44b48-ed08-4f20-9498-b0eed38a00a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.466791] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 702.466791] env[61972]: DEBUG nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 702.466791] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 702.480542] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.819507] env[61972]: DEBUG nova.scheduler.client.report [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 702.929731] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5034d5b5-baa8-4780-bcb4-e77043204ab3 tempest-ServerGroupTestJSON-1400091871 tempest-ServerGroupTestJSON-1400091871-project-member] Lock "5c036232-736c-4c34-a2b7-7de517b9cd50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 124.566s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.985651] env[61972]: DEBUG nova.network.neutron [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.327457] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.091s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.328254] env[61972]: ERROR nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. 
[ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Traceback (most recent call last): [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self.driver.spawn(context, instance, image_meta, [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] vm_ref = self.build_virtual_machine(instance, [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] vif_infos = vmwarevif.get_vif_info(self._session, [ 703.328254] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] for vif in network_info: [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] return self._sync_wrapper(fn, *args, **kwargs) [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self.wait() [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self[:] = self._gt.wait() [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] return self._exit_event.wait() [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] result = hub.switch() [ 703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
703.328645] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] return self.greenlet.switch() [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] result = function(*args, **kwargs) [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] return func(*args, **kwargs) [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] raise e [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] nwinfo = self.network_api.allocate_for_instance( [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] created_port_ids = self._update_ports_for_instance( [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] with excutils.save_and_reraise_exception(): [ 703.329056] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] self.force_reraise() [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] raise self.value [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] updated_port = self._update_port( [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] _ensure_no_port_binding_failure(port) [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] raise exception.PortBindingFailed(port_id=port['id']) [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] nova.exception.PortBindingFailed: Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. [ 703.329486] env[61972]: ERROR nova.compute.manager [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] [ 703.329842] env[61972]: DEBUG nova.compute.utils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 703.333702] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Build of instance 503419e5-ae32-49d4-bc41-838fb3c9437e was re-scheduled: Binding failed for port 7fbdef94-9fb6-4093-8c58-de0936af0d85, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 703.334182] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 703.334562] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Acquiring lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.334763] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Acquired lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.334978] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.336170] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.813s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.337903] env[61972]: INFO nova.compute.claims [None 
req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.433757] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 703.488919] env[61972]: INFO nova.compute.manager [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] [instance: eeb44b48-ed08-4f20-9498-b0eed38a00a2] Took 1.02 seconds to deallocate network for instance. [ 703.867455] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.962568] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.980183] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.484445] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Releasing lock "refresh_cache-503419e5-ae32-49d4-bc41-838fb3c9437e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.484672] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 704.484863] env[61972]: DEBUG nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 704.484863] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.518278] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.529347] env[61972]: INFO nova.scheduler.client.report [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Deleted allocations for instance eeb44b48-ed08-4f20-9498-b0eed38a00a2 [ 704.668150] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Acquiring lock "8a9a51b5-a8a5-4bda-a36c-682758f50745" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.670276] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Lock "8a9a51b5-a8a5-4bda-a36c-682758f50745" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.798888] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92530233-bdbb-407d-b44a-95006cb06b35 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.808910] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c490aa37-e630-4a03-8beb-4496c727cfa0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.839192] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc635af0-d0dd-4a4e-ad4e-e835086a7894 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.849665] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95d03d7-c5de-4c98-be1c-f380a5cafe9f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.864223] env[61972]: DEBUG 
nova.compute.provider_tree [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.023653] env[61972]: DEBUG nova.network.neutron [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.039763] env[61972]: DEBUG oslo_concurrency.lockutils [None req-edba7de7-6ec9-4851-a918-a35f0772a600 tempest-ServerTagsTestJSON-273912885 tempest-ServerTagsTestJSON-273912885-project-member] Lock "eeb44b48-ed08-4f20-9498-b0eed38a00a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.461s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.368045] env[61972]: DEBUG nova.scheduler.client.report [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 705.529558] env[61972]: INFO nova.compute.manager [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] [instance: 503419e5-ae32-49d4-bc41-838fb3c9437e] Took 1.04 seconds to deallocate network for instance. [ 705.543386] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 705.877747] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.878873] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 705.883477] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.259s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.068906] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.389616] env[61972]: DEBUG nova.compute.utils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 706.393824] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 706.394031] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 706.529940] env[61972]: DEBUG nova.policy [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34838d72bc5c40e4861aeb1bc2346e0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e6f816e56de421ba4a2d7de91a6550c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 706.575702] env[61972]: INFO nova.scheduler.client.report [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Deleted allocations for instance 503419e5-ae32-49d4-bc41-838fb3c9437e [ 706.878639] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b69f86-531c-46f0-acca-96c009304fb2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.891507] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbac484-f405-4e93-b55b-ffe6538d712c {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.896732] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 706.928933] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dff4c0-4196-46ba-9325-fcddd54f90cf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.938372] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59839156-816a-4636-a375-72c1b57796e8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.957199] env[61972]: DEBUG nova.compute.provider_tree [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.085625] env[61972]: DEBUG oslo_concurrency.lockutils [None req-84ff4bd7-15c0-412d-b664-ff1de3847a79 tempest-ServersV294TestFqdnHostnames-1562221556 tempest-ServersV294TestFqdnHostnames-1562221556-project-member] Lock "503419e5-ae32-49d4-bc41-838fb3c9437e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.029s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.195950] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Successfully created port: 5afa0c7c-d65e-4136-a05f-576cf397cf44 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 707.461255] env[61972]: DEBUG nova.scheduler.client.report [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 707.591650] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 707.936140] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 707.970249] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 707.970249] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 707.970249] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.970566] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 707.970566] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.970566] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 707.970566] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 707.970566] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 707.970803] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 707.970803] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 707.970803] env[61972]: DEBUG nova.virt.hardware [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 707.974216] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.089s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.974216] env[61972]: ERROR nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. 
[ 707.974216] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Traceback (most recent call last): [ 707.974216] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 707.974216] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self.driver.spawn(context, instance, image_meta, [ 707.974216] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 707.974216] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 707.974216] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 707.974216] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] vm_ref = self.build_virtual_machine(instance, [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] vif_infos = vmwarevif.get_vif_info(self._session, [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] for vif in network_info: [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] return self._sync_wrapper(fn, *args, **kwargs) [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self.wait() [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self[:] = self._gt.wait() [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] return self._exit_event.wait() [ 707.974521] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] result = hub.switch() [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] return self.greenlet.switch() [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] result = function(*args, **kwargs) [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] return func(*args, **kwargs) [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] raise e [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] nwinfo = self.network_api.allocate_for_instance( [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 707.974884] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] created_port_ids = self._update_ports_for_instance( [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] with excutils.save_and_reraise_exception(): [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] self.force_reraise() [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] raise self.value [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] updated_port = self._update_port( [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] _ensure_no_port_binding_failure(port) [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 707.975367] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] raise exception.PortBindingFailed(port_id=port['id']) [ 707.975699] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] nova.exception.PortBindingFailed: Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. [ 707.975699] env[61972]: ERROR nova.compute.manager [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] [ 707.975699] env[61972]: DEBUG nova.compute.utils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 707.976906] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5bafb1-2db5-439e-8b43-8f3e3a7495e9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.980553] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Build of instance 5300907c-d589-4ccf-a9c5-4a6bd819783b was re-scheduled: Binding failed for port 5c505086-4186-4f14-a033-2f16f1d406b8, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 707.981249] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 707.981524] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Acquiring lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.981742] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Acquired lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.981948] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.983904] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.344s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.996486] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f23a88-b4c0-4b2c-ac14-4741db58fe46 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.117399] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.523659] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.775333] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.936510] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db81fd5-a5c5-4ed0-9901-be42bbcc531c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.945998] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e37f35-6c38-4908-b4ea-634dbbbd4b83 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.984023] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da045f72-7843-4417-92b3-f0af71c9585a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.988774] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcab562-f539-42da-a795-e05566fa16b4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.003511] env[61972]: DEBUG nova.compute.provider_tree [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.277819] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Releasing lock "refresh_cache-5300907c-d589-4ccf-a9c5-4a6bd819783b" {{(pid=61972) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.277819] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 709.277819] env[61972]: DEBUG nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 709.278111] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 709.320807] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.511551] env[61972]: DEBUG nova.scheduler.client.report [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 709.683418] env[61972]: DEBUG nova.compute.manager [req-13c28fb7-ae66-4fb6-9e63-1f4dc4a8712b req-4f1a52ad-a7cf-46d7-b87b-ccf13c5a4147 service nova] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Received event network-changed-5afa0c7c-d65e-4136-a05f-576cf397cf44 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 709.683418] env[61972]: DEBUG nova.compute.manager [req-13c28fb7-ae66-4fb6-9e63-1f4dc4a8712b req-4f1a52ad-a7cf-46d7-b87b-ccf13c5a4147 service nova] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Refreshing instance network info cache due to event network-changed-5afa0c7c-d65e-4136-a05f-576cf397cf44. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 709.683418] env[61972]: DEBUG oslo_concurrency.lockutils [req-13c28fb7-ae66-4fb6-9e63-1f4dc4a8712b req-4f1a52ad-a7cf-46d7-b87b-ccf13c5a4147 service nova] Acquiring lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.683418] env[61972]: DEBUG oslo_concurrency.lockutils [req-13c28fb7-ae66-4fb6-9e63-1f4dc4a8712b req-4f1a52ad-a7cf-46d7-b87b-ccf13c5a4147 service nova] Acquired lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.683418] env[61972]: DEBUG nova.network.neutron [req-13c28fb7-ae66-4fb6-9e63-1f4dc4a8712b req-4f1a52ad-a7cf-46d7-b87b-ccf13c5a4147 service nova] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Refreshing network info cache for port 5afa0c7c-d65e-4136-a05f-576cf397cf44 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.823509] env[61972]: DEBUG nova.network.neutron [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.005263] env[61972]: ERROR nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. 
[ 710.005263] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 710.005263] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 710.005263] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 710.005263] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.005263] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 710.005263] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.005263] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 710.005263] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.005263] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 710.005263] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.005263] env[61972]: ERROR nova.compute.manager raise self.value [ 710.005263] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.005263] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 710.005263] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.005263] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 710.005761] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.005761] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 710.005761] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. 
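All of the PortBindingFailed tracebacks in this excerpt bottom out in the same place: _update_port() in nova/network/neutron.py hands each port returned by Neutron to _ensure_no_port_binding_failure(), which raises when the binding did not stick. A minimal sketch of that check, inferred from the call chain shown in the traceback rather than copied from the Nova source (the 'binding_failed' vif_type value is an assumption):

    # Sketch of the check the tracebacks end in (paraphrased, not the Nova source).
    # Assumption: a failed binding is reported by Neutron via the port's
    # 'binding:vif_type' field being set to 'binding_failed'.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Called for every port dict Neutron returns from the port update.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

Because the exception propagates out of _allocate_network_async, the compute manager aborts the resource claim and re-schedules the build, which is the "was re-scheduled" / "Unplugging VIFs" / "Deallocating network" sequence visible for instances 503419e5 and 5300907c earlier in this excerpt.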
[ 710.005761] env[61972]: ERROR nova.compute.manager [ 710.005761] env[61972]: Traceback (most recent call last): [ 710.005761] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 710.005761] env[61972]: listener.cb(fileno) [ 710.005761] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 710.005761] env[61972]: result = function(*args, **kwargs) [ 710.005761] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 710.005761] env[61972]: return func(*args, **kwargs) [ 710.005761] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 710.005761] env[61972]: raise e [ 710.005761] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 710.005761] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 710.005761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.005761] env[61972]: created_port_ids = self._update_ports_for_instance( [ 710.005761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.005761] env[61972]: with excutils.save_and_reraise_exception(): [ 710.005761] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.005761] env[61972]: self.force_reraise() [ 710.005761] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.005761] env[61972]: raise self.value [ 710.005761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.005761] env[61972]: updated_port = self._update_port( [ 710.005761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.005761] env[61972]: _ensure_no_port_binding_failure(port) [ 710.005761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.005761] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 710.006727] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. [ 710.006727] env[61972]: Removing descriptor: 19 [ 710.006727] env[61972]: ERROR nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. 
[ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Traceback (most recent call last): [ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] yield resources [ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self.driver.spawn(context, instance, image_meta, [ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self._vmops.spawn(context, instance, image_meta, injected_files, [ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 710.006727] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] vm_ref = self.build_virtual_machine(instance, [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] vif_infos = vmwarevif.get_vif_info(self._session, [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] for vif in network_info: [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] return self._sync_wrapper(fn, *args, **kwargs) [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self.wait() [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self[:] = self._gt.wait() [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] return self._exit_event.wait() [ 710.007061] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 710.008316] env[61972]: ERROR 
nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] result = hub.switch() [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] return self.greenlet.switch() [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] result = function(*args, **kwargs) [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] return func(*args, **kwargs) [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] raise e [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] nwinfo = self.network_api.allocate_for_instance( [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.008316] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] created_port_ids = self._update_ports_for_instance( [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] with excutils.save_and_reraise_exception(): [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self.force_reraise() [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] raise self.value [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] updated_port = self._update_port( [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.008968] 
env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] _ensure_no_port_binding_failure(port) [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.008968] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] raise exception.PortBindingFailed(port_id=port['id']) [ 710.009372] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] nova.exception.PortBindingFailed: Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. [ 710.009372] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] [ 710.009372] env[61972]: INFO nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Terminating instance [ 710.023020] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.036s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.023020] env[61972]: ERROR nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. 
[ 710.023020] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Traceback (most recent call last): [ 710.023020] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 710.023020] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self.driver.spawn(context, instance, image_meta, [ 710.023020] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 710.023020] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self._vmops.spawn(context, instance, image_meta, injected_files, [ 710.023020] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 710.023020] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] vm_ref = self.build_virtual_machine(instance, [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] vif_infos = vmwarevif.get_vif_info(self._session, [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] for vif in network_info: [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] return self._sync_wrapper(fn, *args, **kwargs) [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self.wait() [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self[:] = self._gt.wait() [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] return self._exit_event.wait() [ 710.023409] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] result = hub.switch() [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] return self.greenlet.switch() [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] result = function(*args, **kwargs) [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] return func(*args, **kwargs) [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] raise e [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] nwinfo = self.network_api.allocate_for_instance( [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.023772] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] created_port_ids = self._update_ports_for_instance( [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] with excutils.save_and_reraise_exception(): [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] self.force_reraise() [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] raise self.value [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] updated_port = self._update_port( [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] _ensure_no_port_binding_failure(port) [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 710.024152] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] raise exception.PortBindingFailed(port_id=port['id']) [ 710.024536] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] nova.exception.PortBindingFailed: Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. [ 710.024536] env[61972]: ERROR nova.compute.manager [instance: 92b06621-cdaa-4723-b339-c0f698897d24] [ 710.024536] env[61972]: DEBUG nova.compute.utils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 710.024536] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.541s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.027818] env[61972]: INFO nova.compute.claims [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.034190] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Build of instance 92b06621-cdaa-4723-b339-c0f698897d24 was re-scheduled: Binding failed for port 11325322-ae52-4fd2-b017-297605a61bcb, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 710.035116] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 710.035116] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.038127] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.038127] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.218139] env[61972]: DEBUG nova.network.neutron [req-13c28fb7-ae66-4fb6-9e63-1f4dc4a8712b req-4f1a52ad-a7cf-46d7-b87b-ccf13c5a4147 service nova] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.329484] env[61972]: INFO nova.compute.manager [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] [instance: 5300907c-d589-4ccf-a9c5-4a6bd819783b] Took 1.05 seconds to deallocate network for instance. [ 710.387366] env[61972]: DEBUG nova.network.neutron [req-13c28fb7-ae66-4fb6-9e63-1f4dc4a8712b req-4f1a52ad-a7cf-46d7-b87b-ccf13c5a4147 service nova] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.511690] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.705193] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.826972] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.888923] env[61972]: DEBUG oslo_concurrency.lockutils [req-13c28fb7-ae66-4fb6-9e63-1f4dc4a8712b req-4f1a52ad-a7cf-46d7-b87b-ccf13c5a4147 service nova] Releasing lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.892717] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.892717] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.121093] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.121093] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.330975] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "refresh_cache-92b06621-cdaa-4723-b339-c0f698897d24" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.331854] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 711.331854] env[61972]: DEBUG nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 711.331854] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.349944] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.364389] env[61972]: INFO nova.scheduler.client.report [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Deleted allocations for instance 5300907c-d589-4ccf-a9c5-4a6bd819783b [ 711.402952] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43abca87-72a4-461f-b214-c75566d9b55f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.411164] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416d1210-b612-4aef-9ae1-04aa90ca282e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.445643] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.447930] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d58fa5b-f16e-4af9-bef8-fda8a4090b51 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.455794] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e2de9e-939e-4b7e-b536-ddcc3a1e3fde {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.470850] env[61972]: DEBUG nova.compute.provider_tree [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.565615] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.707323] env[61972]: DEBUG nova.compute.manager [req-60d73b12-84aa-4a0d-97bd-1a4e0002eac5 req-4813c8f6-f66b-4bb5-8bd3-f6abdab5bd81 service nova] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Received event network-vif-deleted-5afa0c7c-d65e-4136-a05f-576cf397cf44 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 711.852706] env[61972]: DEBUG nova.network.neutron [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.875886] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6a8b5cf4-b12b-404b-99ff-6a1db5e84f26 tempest-ServerAddressesNegativeTestJSON-459735792 tempest-ServerAddressesNegativeTestJSON-459735792-project-member] Lock "5300907c-d589-4ccf-a9c5-4a6bd819783b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.901s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.975273] env[61972]: DEBUG nova.scheduler.client.report [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 712.068239] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 
tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.068705] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 712.069947] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.069947] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be4949a2-2c9b-463a-a99f-c242d13bcc35 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.079845] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00ff971-cd42-4d00-be16-f7c8220ae67a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.102394] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 22634f52-c696-417b-bfe9-0a7ca62aad40 could not be found. [ 712.102627] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 712.102829] env[61972]: INFO nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Took 0.03 seconds to destroy the instance on the hypervisor. [ 712.103063] env[61972]: DEBUG oslo.service.loopingcall [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.103295] env[61972]: DEBUG nova.compute.manager [-] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 712.103394] env[61972]: DEBUG nova.network.neutron [-] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 712.123202] env[61972]: DEBUG nova.network.neutron [-] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.356444] env[61972]: INFO nova.compute.manager [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 92b06621-cdaa-4723-b339-c0f698897d24] Took 1.02 seconds to deallocate network for instance. [ 712.379369] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 712.484040] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.484040] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 712.484550] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.803s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.631511] env[61972]: DEBUG nova.network.neutron [-] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.905743] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.993205] env[61972]: DEBUG nova.compute.utils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 712.994913] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 712.995288] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.069262] env[61972]: DEBUG nova.policy [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b12122c73ec440098daf417330d4443f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d3e3e9945fc4774a35ad817e8e312ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 713.136855] env[61972]: INFO nova.compute.manager [-] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Took 1.03 seconds to deallocate network for instance. [ 713.139882] env[61972]: DEBUG nova.compute.claims [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 713.140266] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.389022] env[61972]: INFO nova.scheduler.client.report [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted allocations for instance 92b06621-cdaa-4723-b339-c0f698897d24 [ 713.441912] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b2e96f-cec6-4df4-9d3d-bd98108d6fb1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.452756] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0847431-6db4-4650-ac03-0307960ceba1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.482658] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0ff3cc-8987-402e-85aa-076f93f0c127 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.491168] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69043d59-e6b0-4e49-b334-39941dfaf12d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.512719] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce 
tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 713.515509] env[61972]: DEBUG nova.compute.provider_tree [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.738533] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Successfully created port: 886b147b-8c50-447e-91a7-17d70f5bc53e {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 713.896993] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5338f093-0768-4383-b391-7edda3895515 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "92b06621-cdaa-4723-b339-c0f698897d24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.348s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.025829] env[61972]: DEBUG nova.scheduler.client.report [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 714.404049] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 714.531285] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 714.535364] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.051s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.536023] env[61972]: ERROR nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Traceback (most recent call last): [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self.driver.spawn(context, instance, image_meta, [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] vm_ref = self.build_virtual_machine(instance, [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] vif_infos = vmwarevif.get_vif_info(self._session, [ 714.536023] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] for vif in network_info: [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] return self._sync_wrapper(fn, *args, **kwargs) [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self.wait() [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 714.536368] env[61972]: ERROR 
nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self[:] = self._gt.wait() [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] return self._exit_event.wait() [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] result = hub.switch() [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 714.536368] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] return self.greenlet.switch() [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] result = function(*args, **kwargs) [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] return func(*args, **kwargs) [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] raise e [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] nwinfo = self.network_api.allocate_for_instance( [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] created_port_ids = self._update_ports_for_instance( [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] with excutils.save_and_reraise_exception(): [ 714.536699] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] self.force_reraise() [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] raise self.value [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] updated_port = self._update_port( [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] _ensure_no_port_binding_failure(port) [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] raise exception.PortBindingFailed(port_id=port['id']) [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] nova.exception.PortBindingFailed: Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. [ 714.537102] env[61972]: ERROR nova.compute.manager [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] [ 714.537439] env[61972]: DEBUG nova.compute.utils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 714.540746] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.618s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.540746] env[61972]: INFO nova.compute.claims [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.543950] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Build of instance 50e4d9d3-a17b-4bb2-9816-bb44f269370e was re-scheduled: Binding failed for port c9cc0c9c-9404-44be-84c8-6c8ac8c7b890, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 714.544427] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 714.544651] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.544819] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.544935] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 714.584746] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 714.585030] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 714.585267] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.585362] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 714.585571] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.585656] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 714.585849] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 714.585935] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 714.588855] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 714.589101] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 714.589309] env[61972]: DEBUG nova.virt.hardware [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 714.590611] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b94c5bf-375e-44f3-a43b-ad4cc7aa74b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.598785] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c8b28a-5b95-4ccc-9669-9e40dc7ec91a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.940413] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.075508] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 
tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.363175] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.766772] env[61972]: ERROR nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. [ 715.766772] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 715.766772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 715.766772] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 715.766772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 715.766772] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 715.766772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 715.766772] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 715.766772] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.766772] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 715.766772] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.766772] env[61972]: ERROR nova.compute.manager raise self.value [ 715.766772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 715.766772] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 715.766772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.766772] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 715.770075] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.770075] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 715.770075] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. 
[ 715.770075] env[61972]: ERROR nova.compute.manager [ 715.770075] env[61972]: Traceback (most recent call last): [ 715.770075] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 715.770075] env[61972]: listener.cb(fileno) [ 715.770075] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 715.770075] env[61972]: result = function(*args, **kwargs) [ 715.770075] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 715.770075] env[61972]: return func(*args, **kwargs) [ 715.770075] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 715.770075] env[61972]: raise e [ 715.770075] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 715.770075] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 715.770075] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 715.770075] env[61972]: created_port_ids = self._update_ports_for_instance( [ 715.770075] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 715.770075] env[61972]: with excutils.save_and_reraise_exception(): [ 715.770075] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.770075] env[61972]: self.force_reraise() [ 715.770075] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.770075] env[61972]: raise self.value [ 715.770075] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 715.770075] env[61972]: updated_port = self._update_port( [ 715.770075] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.770075] env[61972]: _ensure_no_port_binding_failure(port) [ 715.770075] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.770075] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 715.771063] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. [ 715.771063] env[61972]: Removing descriptor: 19 [ 715.771063] env[61972]: ERROR nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. 
[ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Traceback (most recent call last): [ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] yield resources [ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self.driver.spawn(context, instance, image_meta, [ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 715.771063] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] vm_ref = self.build_virtual_machine(instance, [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] vif_infos = vmwarevif.get_vif_info(self._session, [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] for vif in network_info: [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] return self._sync_wrapper(fn, *args, **kwargs) [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self.wait() [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self[:] = self._gt.wait() [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] return self._exit_event.wait() [ 715.771417] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.771824] env[61972]: ERROR 
nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] result = hub.switch() [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] return self.greenlet.switch() [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] result = function(*args, **kwargs) [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] return func(*args, **kwargs) [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] raise e [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] nwinfo = self.network_api.allocate_for_instance( [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 715.771824] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] created_port_ids = self._update_ports_for_instance( [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] with excutils.save_and_reraise_exception(): [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self.force_reraise() [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] raise self.value [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] updated_port = self._update_port( [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.772269] 
env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] _ensure_no_port_binding_failure(port) [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.772269] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] raise exception.PortBindingFailed(port_id=port['id']) [ 715.772614] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] nova.exception.PortBindingFailed: Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. [ 715.772614] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] [ 715.772614] env[61972]: INFO nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Terminating instance [ 715.866586] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "refresh_cache-50e4d9d3-a17b-4bb2-9816-bb44f269370e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.866586] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 715.866586] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 715.866586] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 715.888112] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.966934] env[61972]: DEBUG nova.compute.manager [req-0a90b774-fe40-4cad-b4ed-0913f0cd8a4d req-8eb8a712-3293-456e-b061-8f38c47d9f89 service nova] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Received event network-changed-886b147b-8c50-447e-91a7-17d70f5bc53e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 715.967234] env[61972]: DEBUG nova.compute.manager [req-0a90b774-fe40-4cad-b4ed-0913f0cd8a4d req-8eb8a712-3293-456e-b061-8f38c47d9f89 service nova] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Refreshing instance network info cache due to event network-changed-886b147b-8c50-447e-91a7-17d70f5bc53e. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 715.967548] env[61972]: DEBUG oslo_concurrency.lockutils [req-0a90b774-fe40-4cad-b4ed-0913f0cd8a4d req-8eb8a712-3293-456e-b061-8f38c47d9f89 service nova] Acquiring lock "refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.967935] env[61972]: DEBUG oslo_concurrency.lockutils [req-0a90b774-fe40-4cad-b4ed-0913f0cd8a4d req-8eb8a712-3293-456e-b061-8f38c47d9f89 service nova] Acquired lock "refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.968410] env[61972]: DEBUG nova.network.neutron [req-0a90b774-fe40-4cad-b4ed-0913f0cd8a4d req-8eb8a712-3293-456e-b061-8f38c47d9f89 service nova] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Refreshing network info cache for port 886b147b-8c50-447e-91a7-17d70f5bc53e {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.043171] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302ac740-de8f-4673-955f-a9e49cb3250c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.054084] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b9bdaa-6a7b-4d9e-85e0-cd40388fad97 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.091589] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d706316-ae79-4830-86ac-4d7730b40f1c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.098654] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a00af1-8a29-4757-ab1c-330db38871d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.113312] env[61972]: DEBUG nova.compute.provider_tree [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.275969] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock 
"refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.392073] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.454955] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "a77d41aa-13ba-4d26-b5fd-4928891948ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.455208] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "a77d41aa-13ba-4d26-b5fd-4928891948ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.455390] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.455565] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.509246] env[61972]: DEBUG nova.network.neutron [req-0a90b774-fe40-4cad-b4ed-0913f0cd8a4d req-8eb8a712-3293-456e-b061-8f38c47d9f89 service nova] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.581654] env[61972]: DEBUG nova.network.neutron [req-0a90b774-fe40-4cad-b4ed-0913f0cd8a4d req-8eb8a712-3293-456e-b061-8f38c47d9f89 service nova] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.616438] env[61972]: DEBUG nova.scheduler.client.report [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 716.894840] env[61972]: INFO nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 50e4d9d3-a17b-4bb2-9816-bb44f269370e] Took 1.03 seconds to deallocate network for instance. [ 717.084432] env[61972]: DEBUG oslo_concurrency.lockutils [req-0a90b774-fe40-4cad-b4ed-0913f0cd8a4d req-8eb8a712-3293-456e-b061-8f38c47d9f89 service nova] Releasing lock "refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.085087] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquired lock "refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.085335] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 717.121052] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.121558] env[61972]: DEBUG nova.compute.manager [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 717.124271] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.202s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.125835] env[61972]: INFO nova.compute.claims [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.614589] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.630960] env[61972]: DEBUG nova.compute.utils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 717.638580] env[61972]: DEBUG nova.compute.manager [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Not allocating networking since 'none' was specified. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 717.730181] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.923529] env[61972]: INFO nova.scheduler.client.report [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Deleted allocations for instance 50e4d9d3-a17b-4bb2-9816-bb44f269370e [ 718.004027] env[61972]: DEBUG nova.compute.manager [req-968e37de-634a-4003-b3ad-d4dc95040247 req-7a56b9b6-dd91-44ee-ad81-d38060ff3e32 service nova] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Received event network-vif-deleted-886b147b-8c50-447e-91a7-17d70f5bc53e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 718.139968] env[61972]: DEBUG nova.compute.manager [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 718.236318] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Releasing lock "refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.236318] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 718.236605] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 718.237140] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8958eb00-b1fa-492a-8cca-9a94bc430310 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.246877] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c612a603-3663-43e5-a058-cdd83d6bbc71 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.276745] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2ba9f652-c274-4d79-84a2-ad1384c99b91 could not be found. [ 718.276977] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 718.277172] env[61972]: INFO nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Took 0.04 seconds to destroy the instance on the hypervisor. [ 718.277429] env[61972]: DEBUG oslo.service.loopingcall [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 718.277665] env[61972]: DEBUG nova.compute.manager [-] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 718.277759] env[61972]: DEBUG nova.network.neutron [-] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 718.306595] env[61972]: DEBUG nova.network.neutron [-] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.367573] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquiring lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.367573] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.436379] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "50e4d9d3-a17b-4bb2-9816-bb44f269370e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 130.166s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.568992] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9287471-b33f-43d8-a279-8933f176e6ff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.576500] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf0535f-d483-4206-8f12-b6b1293027dd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.607879] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9828c5-70b6-4ea5-a103-1a6a3ef183ae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.614992] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c50cb4-8475-4cb5-8984-54d730e6259d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.627569] env[61972]: DEBUG nova.compute.provider_tree [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Inventory has not changed in 
ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.809424] env[61972]: DEBUG nova.network.neutron [-] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.938843] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 719.131184] env[61972]: DEBUG nova.scheduler.client.report [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 719.151705] env[61972]: DEBUG nova.compute.manager [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 719.180507] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 719.180769] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 719.180920] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.181113] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 719.181258] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.181398] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 719.181601] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 719.181750] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 719.181910] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 
tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 719.182118] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 719.182335] env[61972]: DEBUG nova.virt.hardware [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 719.183292] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e24e9a-c7a0-4839-b2e4-626c3532075e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.192172] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101df94b-d8e9-4ab7-9bbd-8d9945728285 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.205782] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 719.211198] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Creating folder: Project (a1daddcb457f4d78989cb9466e26684b). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 719.211475] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07070dcf-5ba5-4d25-a7e3-de474fe569d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.220822] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Created folder: Project (a1daddcb457f4d78989cb9466e26684b) in parent group-v294799. [ 719.221010] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Creating folder: Instances. Parent ref: group-v294811. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 719.221259] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf19cce6-fe0e-44dd-b877-ef3076f2bbc3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.229208] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Created folder: Instances in parent group-v294811. 
[ 719.229428] env[61972]: DEBUG oslo.service.loopingcall [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.229605] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 719.229789] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-317dfc98-89ae-4276-90e8-5bbd70c0856f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.245213] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.245213] env[61972]: value = "task-1389092" [ 719.245213] env[61972]: _type = "Task" [ 719.245213] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.252381] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389092, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.312712] env[61972]: INFO nova.compute.manager [-] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Took 1.03 seconds to deallocate network for instance. [ 719.315374] env[61972]: DEBUG nova.compute.claims [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 719.315513] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.463161] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.636238] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.636785] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 719.639503] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.677s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.640960] env[61972]: INFO nova.compute.claims [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.755053] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389092, 'name': CreateVM_Task, 'duration_secs': 0.411714} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.755199] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 719.756038] env[61972]: DEBUG oslo_vmware.service [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0709d86f-9157-412a-955b-ece6c15ec7cc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.761303] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.761462] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.761807] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 719.762035] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12b4facf-a444-43c0-baf8-93610f72fbf5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.766086] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 719.766086] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52618fd0-8e17-6b6b-e658-a9d3542923b4" [ 719.766086] env[61972]: _type = 
"Task" [ 719.766086] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.772657] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52618fd0-8e17-6b6b-e658-a9d3542923b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.146236] env[61972]: DEBUG nova.compute.utils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 720.151178] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 720.151178] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 720.226513] env[61972]: DEBUG nova.policy [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80d83105fb70448ba5c77c8a4804981a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a3bc061cce2477c92896cfecdce2747', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 720.276777] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.277047] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.277277] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.277421] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.277595] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.277837] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b7c074a-858d-4044-b760-fffced8a9d77 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.285965] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.285965] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 720.286729] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37b004d-cb50-4681-84dd-d0c020cb9468 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.292746] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc2362d5-b684-4976-b47d-231bb7285179 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.298066] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 720.298066] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]524f1020-ca02-3cf6-bc3c-599ab58e2eac" [ 720.298066] env[61972]: _type = "Task" [ 720.298066] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.305556] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]524f1020-ca02-3cf6-bc3c-599ab58e2eac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.568900] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Successfully created port: c2f9a5cd-c75e-4a15-b45d-819681846033 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.653557] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 720.811874] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Preparing fetch location {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 720.812464] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Creating directory with path [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.812761] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a18bb052-53e4-4318-8360-37c2e88ff59c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.832349] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Created directory with path [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.832555] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Fetch image to [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 720.832734] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Downloading image file data 79227ea9-188c-426d-a7d8-cb14b658f493 to [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk on the data store datastore1 {{(pid=61972) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 720.833511] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c4203c-69e1-4cd5-abfe-6b8a51be8b4f {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.843467] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c76161-b345-4792-ae4c-64b1d876fdc9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.858232] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072c20ce-8c57-45f0-a952-19109c1e4e38 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.898089] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c37133e-005f-40c3-8fea-f9326e7ce9cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.904276] env[61972]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e4906aa6-0ad4-4143-8023-b552b0126dcc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.924648] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Downloading image file data 79227ea9-188c-426d-a7d8-cb14b658f493 to the data store datastore1 {{(pid=61972) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 720.984668] env[61972]: DEBUG oslo_vmware.rw_handles [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61972) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 721.122722] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e79640-ad7f-4869-9549-098bd12da515 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.133325] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dc2b57-932a-42e0-a96e-e2fae49a72c0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.173449] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac85939-bdb4-4c02-b33a-ac0c4e2887b4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.183178] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7280d31f-c560-4d6c-9491-616f74361d2e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.201491] env[61972]: DEBUG nova.compute.provider_tree [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.599174] env[61972]: DEBUG oslo_vmware.rw_handles [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Completed reading data from the image iterator. {{(pid=61972) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 721.599516] env[61972]: DEBUG oslo_vmware.rw_handles [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 721.669831] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 721.692475] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 721.692722] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 721.692989] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.693128] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 721.693211] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.693353] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 721.693557] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 721.693709] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 721.693868] env[61972]: DEBUG nova.virt.hardware [None 
req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 721.694086] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 721.694292] env[61972]: DEBUG nova.virt.hardware [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 721.695185] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae329c9-3f35-42f3-a90a-4d76f13a7549 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.702999] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b6f30c-b887-48f6-968b-b6a788f2c8df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.709351] env[61972]: DEBUG nova.scheduler.client.report [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 721.736350] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Downloaded image file data 79227ea9-188c-426d-a7d8-cb14b658f493 to vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk on the data store datastore1 {{(pid=61972) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 721.738648] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Caching image {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 721.739688] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Copying Virtual Disk [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk to [datastore1] 
vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 721.739935] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02be5cc5-fcbd-469c-a070-dd8b95e03fdc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.746388] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 721.746388] env[61972]: value = "task-1389093" [ 721.746388] env[61972]: _type = "Task" [ 721.746388] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.753946] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389093, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.880485] env[61972]: DEBUG nova.compute.manager [req-122b494a-aa83-4b8c-9871-53b0c47670ed req-8dc698c6-2acd-4c5e-bc15-89ff7086b763 service nova] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Received event network-changed-c2f9a5cd-c75e-4a15-b45d-819681846033 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 721.880723] env[61972]: DEBUG nova.compute.manager [req-122b494a-aa83-4b8c-9871-53b0c47670ed req-8dc698c6-2acd-4c5e-bc15-89ff7086b763 service nova] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Refreshing instance network info cache due to event network-changed-c2f9a5cd-c75e-4a15-b45d-819681846033. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 721.881577] env[61972]: DEBUG oslo_concurrency.lockutils [req-122b494a-aa83-4b8c-9871-53b0c47670ed req-8dc698c6-2acd-4c5e-bc15-89ff7086b763 service nova] Acquiring lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.881577] env[61972]: DEBUG oslo_concurrency.lockutils [req-122b494a-aa83-4b8c-9871-53b0c47670ed req-8dc698c6-2acd-4c5e-bc15-89ff7086b763 service nova] Acquired lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.881577] env[61972]: DEBUG nova.network.neutron [req-122b494a-aa83-4b8c-9871-53b0c47670ed req-8dc698c6-2acd-4c5e-bc15-89ff7086b763 service nova] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Refreshing network info cache for port c2f9a5cd-c75e-4a15-b45d-819681846033 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.982629] env[61972]: ERROR nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. 
[ 721.982629] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 721.982629] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.982629] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 721.982629] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.982629] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 721.982629] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.982629] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 721.982629] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.982629] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 721.982629] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.982629] env[61972]: ERROR nova.compute.manager raise self.value [ 721.982629] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.982629] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 721.982629] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.982629] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 721.983157] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.983157] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 721.983157] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. 
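Editor's note: the traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which turns a port that Neutron returned with a failed binding into a PortBindingFailed exception so the build aborts instead of continuing with an unusable VIF. The following is a minimal, hypothetical sketch of that kind of guard, not Nova's actual code; the 'binding_failed' constant value and the sample port dict are assumptions made for illustration.

# Hypothetical sketch of the "port binding failed" guard seen in the traceback.
# Nova's real check lives in nova/network/neutron.py; this is a simplified stand-in.

VIF_TYPE_BINDING_FAILED = 'binding_failed'   # value assumed to be reported when no driver bound the port


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    """Raise if Neutron returned a port whose binding did not succeed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port['id'])


if __name__ == '__main__':
    # A port as it might come back from Neutron when binding failed,
    # e.g. because no suitable host or agent was found for the network.
    port = {'id': 'c2f9a5cd-c75e-4a15-b45d-819681846033',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(f"ERROR: {exc}")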
[ 721.983157] env[61972]: ERROR nova.compute.manager [ 721.983157] env[61972]: Traceback (most recent call last): [ 721.983157] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 721.983157] env[61972]: listener.cb(fileno) [ 721.983157] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.983157] env[61972]: result = function(*args, **kwargs) [ 721.983157] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 721.983157] env[61972]: return func(*args, **kwargs) [ 721.983157] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 721.983157] env[61972]: raise e [ 721.983157] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.983157] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 721.983157] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.983157] env[61972]: created_port_ids = self._update_ports_for_instance( [ 721.983157] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.983157] env[61972]: with excutils.save_and_reraise_exception(): [ 721.983157] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.983157] env[61972]: self.force_reraise() [ 721.983157] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.983157] env[61972]: raise self.value [ 721.983157] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.983157] env[61972]: updated_port = self._update_port( [ 721.983157] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.983157] env[61972]: _ensure_no_port_binding_failure(port) [ 721.983157] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.983157] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 721.984124] env[61972]: nova.exception.PortBindingFailed: Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. [ 721.984124] env[61972]: Removing descriptor: 19 [ 721.984124] env[61972]: ERROR nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. 
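Editor's note: the instance-tagged traceback that follows shows why the failure surfaces inside driver.spawn rather than at allocation time: network allocation runs in a background eventlet greenthread (_allocate_network_async), and the spawn path only hits the error when it first iterates network_info, via the __iter__/_sync_wrapper()/wait() frames in nova/network/model.py. The sketch below reproduces that deferred-result pattern with a stdlib thread standing in for the greenthread; the class and function names are illustrative, not Nova's.

# Illustrative sketch (not Nova's code) of the pattern visible in this trace:
# network allocation is kicked off asynchronously, and the exception only
# surfaces later, when the spawn path first iterates the network_info object.
# A stdlib thread stands in for Nova's eventlet greenthread.

from concurrent.futures import ThreadPoolExecutor


class PortBindingFailed(Exception):
    pass


class DeferredNetworkInfo:
    """Proxy whose data is produced by a background task; touching it
    blocks on the task and re-raises any allocation error."""

    def __init__(self, future):
        self._future = future

    def __iter__(self):
        # Analogous in spirit to the _sync_wrapper() call in the traceback:
        # wait for the background allocation, then behave like a plain list.
        return iter(self._future.result())


def allocate_for_instance(port_id):
    # Pretend Neutron reported a failed binding for this port.
    raise PortBindingFailed(f"Binding failed for port {port_id}")


if __name__ == '__main__':
    executor = ThreadPoolExecutor(max_workers=1)
    future = executor.submit(allocate_for_instance,
                             'c2f9a5cd-c75e-4a15-b45d-819681846033')
    network_info = DeferredNetworkInfo(future)

    try:
        for vif in network_info:          # this is where the error finally pops up
            print(vif)
    except PortBindingFailed as exc:
        print(f"spawn aborted: {exc}")
    finally:
        executor.shutdown()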
[ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Traceback (most recent call last): [ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] yield resources [ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self.driver.spawn(context, instance, image_meta, [ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.984124] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] vm_ref = self.build_virtual_machine(instance, [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] for vif in network_info: [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] return self._sync_wrapper(fn, *args, **kwargs) [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self.wait() [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self[:] = self._gt.wait() [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] return self._exit_event.wait() [ 721.984508] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 721.984923] env[61972]: ERROR 
nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] result = hub.switch() [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] return self.greenlet.switch() [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] result = function(*args, **kwargs) [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] return func(*args, **kwargs) [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] raise e [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] nwinfo = self.network_api.allocate_for_instance( [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.984923] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] created_port_ids = self._update_ports_for_instance( [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] with excutils.save_and_reraise_exception(): [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self.force_reraise() [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] raise self.value [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] updated_port = self._update_port( [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.985336] 
env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] _ensure_no_port_binding_failure(port) [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.985336] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] raise exception.PortBindingFailed(port_id=port['id']) [ 721.985774] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] nova.exception.PortBindingFailed: Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. [ 721.985774] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] [ 721.985774] env[61972]: INFO nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Terminating instance [ 722.214220] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.214926] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 722.217582] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.149s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.220058] env[61972]: INFO nova.compute.claims [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.257350] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389093, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.400284] env[61972]: DEBUG nova.network.neutron [req-122b494a-aa83-4b8c-9871-53b0c47670ed req-8dc698c6-2acd-4c5e-bc15-89ff7086b763 service nova] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.483844] env[61972]: DEBUG nova.network.neutron [req-122b494a-aa83-4b8c-9871-53b0c47670ed req-8dc698c6-2acd-4c5e-bc15-89ff7086b763 service nova] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.488949] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Acquiring lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.724057] env[61972]: DEBUG nova.compute.utils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 722.727636] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 722.727636] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.757620] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389093, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722057} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.757868] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Copied Virtual Disk [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk to [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 722.758052] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleting the datastore file [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493/tmp-sparse.vmdk {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 722.758296] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4fe565e-829f-403f-b0c1-6b07a5c443fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.764322] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 722.764322] env[61972]: value = "task-1389094" [ 722.764322] env[61972]: _type = "Task" [ 722.764322] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.771809] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389094, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.777039] env[61972]: DEBUG nova.policy [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc3cd61498bc4f858a47a72f02466b3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3c052a272742808be2bcdc71d8f62f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 722.988175] env[61972]: DEBUG oslo_concurrency.lockutils [req-122b494a-aa83-4b8c-9871-53b0c47670ed req-8dc698c6-2acd-4c5e-bc15-89ff7086b763 service nova] Releasing lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.988175] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Acquired lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.988175] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 723.139382] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Successfully created port: 0739af50-d18a-4bb5-899a-198661218598 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.228301] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 723.276787] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416782} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.279068] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.279284] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Moving file from [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93/79227ea9-188c-426d-a7d8-cb14b658f493 to [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493. {{(pid=61972) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 723.279968] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-91d26ab4-fef6-4357-bbc0-676d9de37476 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.290000] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 723.290000] env[61972]: value = "task-1389095" [ 723.290000] env[61972]: _type = "Task" [ 723.290000] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.295943] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389095, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.513555] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.587295] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4f162e-3495-40e7-a78e-d0c88b67d7e4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.598981] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494f81ec-0aae-4303-83b1-f98c690e858f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.636881] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae8fe0f-3752-4142-8979-2c0b959481bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.645491] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257156c1-9d10-4893-be55-7d3d11078770 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.661114] env[61972]: DEBUG nova.compute.provider_tree [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.684454] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.796478] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389095, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.029376} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.796758] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] File moved {{(pid=61972) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 723.796995] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Cleaning up location [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 723.797206] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleting the datastore file [datastore1] vmware_temp/483c3b0d-e6d5-4831-93e8-4e29d26c9d93 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.797479] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e894384f-6e49-4aae-9bc6-4596b4e3832c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.803715] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 723.803715] env[61972]: value = "task-1389096" [ 723.803715] env[61972]: _type = "Task" [ 723.803715] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.811563] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389096, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.939870] env[61972]: DEBUG nova.compute.manager [req-ed74c5d8-c4f6-4c7e-9473-71d805866f67 req-f5e51764-2206-46d4-a168-edb84703f5c3 service nova] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Received event network-vif-deleted-c2f9a5cd-c75e-4a15-b45d-819681846033 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 724.167100] env[61972]: DEBUG nova.scheduler.client.report [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 724.187051] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Releasing lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.190137] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 724.190137] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 724.190756] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5580fe5-e0d6-4cbf-aaf4-a70f2ce96062 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.204018] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3dc1e6e-e638-499f-bd65-d35574fba31c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.237599] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e0d51c99-1916-4d66-a141-dfa5d4357174 could not be found. 
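Editor's note: because the spawn aborted before any vCenter VM was created, the destroy path's lookup by UUID finds nothing, and the driver just logs the WARNING above and proceeds as if the instance were already gone. Below is a minimal sketch of that tolerant teardown shape; find_vm_by_uuid() and destroy_vm() are hypothetical stand-ins for the SearchIndex/VM calls the vmwareapi driver actually makes.

# Minimal sketch of a tolerant teardown, mirroring the WARNING above:
# if the backend VM was never created (spawn failed early), treat the
# instance as already destroyed instead of failing the delete.

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    def __init__(self, instance_id):
        super().__init__(f"Instance {instance_id} could not be found.")


def find_vm_by_uuid(uuid):
    # Stand-in for the SearchIndex.FindAllByUuid lookup seen in the log;
    # here it always misses, as it would for an instance that never spawned.
    raise InstanceNotFound(uuid)


def destroy_vm(vm_ref):
    pass  # would power off and unregister/delete the VM


def destroy_instance(uuid):
    try:
        vm_ref = find_vm_by_uuid(uuid)
        destroy_vm(vm_ref)
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", uuid)
    LOG.debug("Instance destroyed")


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    destroy_instance('e0d51c99-1916-4d66-a141-dfa5d4357174')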
[ 724.238151] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 724.241023] env[61972]: INFO nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Took 0.05 seconds to destroy the instance on the hypervisor. [ 724.241023] env[61972]: DEBUG oslo.service.loopingcall [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.241023] env[61972]: DEBUG nova.compute.manager [-] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 724.241023] env[61972]: DEBUG nova.network.neutron [-] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 724.243053] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 724.265549] env[61972]: DEBUG nova.network.neutron [-] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.281134] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.281134] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.281134] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.281317] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.281317] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.281317] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.281317] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.281317] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.281519] env[61972]: DEBUG nova.virt.hardware [None 
req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.281612] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.281916] env[61972]: DEBUG nova.virt.hardware [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.284728] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ab05b8-7747-41fd-b5ff-cff0bd933a02 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.294876] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a07b771-7c94-42de-b483-4dc551841df7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.320150] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025545} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.320586] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 724.321403] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4014cf4a-72bf-4fef-a199-fd1c7b2a3cd3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.327492] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 724.327492] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e1085a-3186-8a55-d989-76fc69cdd9e5" [ 724.327492] env[61972]: _type = "Task" [ 724.327492] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.335113] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e1085a-3186-8a55-d989-76fc69cdd9e5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.474414] env[61972]: ERROR nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. [ 724.474414] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 724.474414] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 724.474414] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 724.474414] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.474414] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 724.474414] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.474414] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 724.474414] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.474414] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 724.474414] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.474414] env[61972]: ERROR nova.compute.manager raise self.value [ 724.474414] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.474414] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 724.474414] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.474414] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 724.474914] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.474914] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 724.474914] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. 
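Editor's note: the "failed network setup after 1 attempt(s)" wording reflects an attempt counter around allocation; the retry budget is configurable in Nova (presumably the network_allocate_retries option, which defaults to zero extra retries, hence a single attempt). The loop below is an illustrative reconstruction of that accounting with a plain retries argument, not the real _allocate_network_async.

# Sketch of the attempt loop implied by "failed network setup after 1 attempt(s)".
# allocate() always fails here, so with retries=0 the single attempt is logged
# and the exception is re-raised to the caller.

import logging

LOG = logging.getLogger(__name__)


class PortBindingFailed(Exception):
    pass


def allocate(port_id):
    raise PortBindingFailed(f"Binding failed for port {port_id}")


def allocate_network_with_retries(port_id, retries=0):
    attempts = retries + 1
    for attempt in range(1, attempts + 1):
        try:
            return allocate(port_id)
        except PortBindingFailed as exc:
            if attempt == attempts:
                LOG.error("Instance failed network setup after %d attempt(s): %s",
                          attempt, exc)
                raise
            LOG.warning("Network setup attempt %d/%d failed, retrying",
                        attempt, attempts)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    try:
        allocate_network_with_retries('0739af50-d18a-4bb5-899a-198661218598')
    except PortBindingFailed:
        pass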
[ 724.474914] env[61972]: ERROR nova.compute.manager [ 724.474914] env[61972]: Traceback (most recent call last): [ 724.474914] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 724.474914] env[61972]: listener.cb(fileno) [ 724.474914] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.474914] env[61972]: result = function(*args, **kwargs) [ 724.474914] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 724.474914] env[61972]: return func(*args, **kwargs) [ 724.474914] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 724.474914] env[61972]: raise e [ 724.474914] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 724.474914] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 724.474914] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.474914] env[61972]: created_port_ids = self._update_ports_for_instance( [ 724.474914] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.474914] env[61972]: with excutils.save_and_reraise_exception(): [ 724.474914] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.474914] env[61972]: self.force_reraise() [ 724.474914] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.474914] env[61972]: raise self.value [ 724.474914] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.474914] env[61972]: updated_port = self._update_port( [ 724.474914] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.474914] env[61972]: _ensure_no_port_binding_failure(port) [ 724.474914] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.474914] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 724.475875] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. [ 724.475875] env[61972]: Removing descriptor: 19 [ 724.475875] env[61972]: ERROR nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. 
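Editor's note: the instance-tagged traceback that follows repeats the same failure from inside _build_resources (the "yield resources" frame at manager.py:2900), whose cleanup path is what produces the "Terminating instance" line further down, and, as with the first instance earlier in the log, network deallocation. The context-manager sketch below shows that build/cleanup shape in miniature; it is an illustrative reconstruction, not nova.compute.manager code.

# Illustrative shape (not Nova's code) of the build/cleanup flow in this trace:
# resources are prepared, the spawn body runs inside the 'with' block, and a
# failure there triggers instance termination and network deallocation.

import contextlib
import logging

LOG = logging.getLogger(__name__)


class PortBindingFailed(Exception):
    pass


@contextlib.contextmanager
def build_resources(instance):
    LOG.debug("Start building networks asynchronously for instance.")
    try:
        yield {'network_info': []}          # stands in for the async network_info
    except Exception:
        LOG.info("[instance: %s] Terminating instance", instance)
        LOG.debug("Deallocating network for instance")
        raise


def spawn(instance, resources):
    # Iterating the (still unresolved) network_info is where the real trace
    # blows up; here we fail directly with the same exception type.
    list(resources['network_info'])
    raise PortBindingFailed(
        "Binding failed for port 0739af50-d18a-4bb5-899a-198661218598")


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    instance = 'a978943b-afd3-44f4-b6c1-5a72dda8ca35'
    try:
        with build_resources(instance) as resources:
            spawn(instance, resources)
    except PortBindingFailed as exc:
        LOG.error("[instance: %s] Instance failed to spawn: %s", instance, exc)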
[ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Traceback (most recent call last): [ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] yield resources [ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self.driver.spawn(context, instance, image_meta, [ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.475875] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] vm_ref = self.build_virtual_machine(instance, [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] for vif in network_info: [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] return self._sync_wrapper(fn, *args, **kwargs) [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self.wait() [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self[:] = self._gt.wait() [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] return self._exit_event.wait() [ 724.476301] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 724.476737] env[61972]: ERROR 
nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] result = hub.switch() [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] return self.greenlet.switch() [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] result = function(*args, **kwargs) [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] return func(*args, **kwargs) [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] raise e [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] nwinfo = self.network_api.allocate_for_instance( [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.476737] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] created_port_ids = self._update_ports_for_instance( [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] with excutils.save_and_reraise_exception(): [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self.force_reraise() [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] raise self.value [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] updated_port = self._update_port( [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.477169] 
env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] _ensure_no_port_binding_failure(port) [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.477169] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] raise exception.PortBindingFailed(port_id=port['id']) [ 724.477648] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] nova.exception.PortBindingFailed: Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. [ 724.477648] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] [ 724.477648] env[61972]: INFO nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Terminating instance [ 724.671880] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.672382] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 724.675307] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.559s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.677121] env[61972]: INFO nova.compute.claims [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.767054] env[61972]: DEBUG nova.network.neutron [-] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.838030] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e1085a-3186-8a55-d989-76fc69cdd9e5, 'name': SearchDatastore_Task, 'duration_secs': 0.010371} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.838311] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.838563] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 724.838816] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7baf209-3b92-4dc2-a711-2e4baead8144 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.845070] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 724.845070] env[61972]: value = "task-1389097" [ 724.845070] env[61972]: _type = "Task" [ 724.845070] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.852678] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389097, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.981829] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.981829] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.981829] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.181806] env[61972]: DEBUG nova.compute.utils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.185008] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 725.185190] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.235127] env[61972]: DEBUG nova.policy [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c7248c691674a519b4afaa86594e8e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bcff1b8013ed4fa7815acbd9e6db32c0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 725.270589] env[61972]: INFO nova.compute.manager [-] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Took 1.03 seconds to deallocate network for instance. 
[ 725.273063] env[61972]: DEBUG nova.compute.claims [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 725.273300] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.356691] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389097, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.518639] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.654254] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.687026] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Successfully created port: 6992765b-0676-40cb-85a8-bd255585b49e {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.689278] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 725.858718] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389097, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.813392} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.858718] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 725.858718] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 725.858718] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1adb7410-9ed4-4f5f-a136-96f46ca7535d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.867852] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 725.867852] env[61972]: value = "task-1389098" [ 725.867852] env[61972]: _type = "Task" [ 725.867852] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.878276] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389098, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.970807] env[61972]: DEBUG nova.compute.manager [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Received event network-changed-0739af50-d18a-4bb5-899a-198661218598 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 725.971015] env[61972]: DEBUG nova.compute.manager [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Refreshing instance network info cache due to event network-changed-0739af50-d18a-4bb5-899a-198661218598. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 725.971217] env[61972]: DEBUG oslo_concurrency.lockutils [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] Acquiring lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.078130] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c58429-856b-443d-9edf-73d08bc9d9b9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.087802] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fa44b6-e717-4e69-a62d-61dfa926143a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.123823] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7904e844-f764-49de-9e05-7db022f460d5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.134763] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6444eb25-295d-40fc-8d76-7b6872179e1d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.147066] env[61972]: DEBUG nova.compute.provider_tree [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.157056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.157056] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 726.157056] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 726.159211] env[61972]: DEBUG oslo_concurrency.lockutils [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] Acquired lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.159211] env[61972]: DEBUG nova.network.neutron [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Refreshing network info cache for port 0739af50-d18a-4bb5-899a-198661218598 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.159211] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55c3b307-1189-4107-bce1-dcf95009e858 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.168396] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b822312f-f595-404d-bc5e-ed0ca0ab6fd8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.193240] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a978943b-afd3-44f4-b6c1-5a72dda8ca35 could not be found. [ 726.193475] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 726.193649] env[61972]: INFO nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Took 0.04 seconds to destroy the instance on the hypervisor. [ 726.193881] env[61972]: DEBUG oslo.service.loopingcall [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 726.194104] env[61972]: DEBUG nova.compute.manager [-] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 726.194200] env[61972]: DEBUG nova.network.neutron [-] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 726.197007] env[61972]: INFO nova.virt.block_device [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Booting with volume a2bb66ea-4013-4863-8bed-269b9d847e8f at /dev/sda [ 726.226817] env[61972]: DEBUG nova.network.neutron [-] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.260590] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-faa4ae2c-31a4-4a0d-a619-fd55f625eaf7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.269314] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e659719-0688-4718-8b75-379c8db63734 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.292290] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfbc2910-a362-48c2-ac38-74ac1618814c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.299886] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a497297c-4bb7-4399-ad14-02fd780ea647 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.324547] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f49200-5672-4043-86f4-9608f54a9019 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.331250] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b828d44-54f8-43ff-afb8-5ddcc20a32df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.346283] env[61972]: DEBUG nova.virt.block_device [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Updating existing volume attachment record: 7e251505-629d-4557-974e-ab5166a7c22b {{(pid=61972) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 726.379715] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389098, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084325} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.379963] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.380778] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfcc6c5-6e78-4c98-a575-83d8498789ea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.400877] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.401171] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e837ba89-7930-4100-9ff1-b80af4ee4c9f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.425233] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 726.425233] env[61972]: value = "task-1389099" [ 726.425233] env[61972]: _type = "Task" [ 726.425233] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.434664] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389099, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.651423] env[61972]: DEBUG nova.scheduler.client.report [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 726.729577] env[61972]: DEBUG nova.network.neutron [-] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.743398] env[61972]: DEBUG nova.network.neutron [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.917890] env[61972]: DEBUG nova.network.neutron [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.936956] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389099, 'name': ReconfigVM_Task, 'duration_secs': 0.307351} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.936956] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 726.936956] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ec77387-1c83-454e-8344-b05b183fd6c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.943981] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 726.943981] env[61972]: value = "task-1389100" [ 726.943981] env[61972]: _type = "Task" [ 726.943981] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.952526] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389100, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.156859] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.157433] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 727.160080] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.254s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.163344] env[61972]: INFO nova.compute.claims [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.233771] env[61972]: ERROR nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. 
[ 727.233771] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 727.233771] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 727.233771] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 727.233771] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.233771] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 727.233771] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.233771] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 727.233771] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.233771] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 727.233771] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.233771] env[61972]: ERROR nova.compute.manager raise self.value [ 727.233771] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.233771] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 727.233771] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.233771] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 727.234531] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.234531] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 727.234531] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. 
[ 727.234531] env[61972]: ERROR nova.compute.manager [ 727.234531] env[61972]: Traceback (most recent call last): [ 727.234531] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 727.234531] env[61972]: listener.cb(fileno) [ 727.234531] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.234531] env[61972]: result = function(*args, **kwargs) [ 727.234531] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.234531] env[61972]: return func(*args, **kwargs) [ 727.234531] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 727.234531] env[61972]: raise e [ 727.234531] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 727.234531] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 727.234531] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.234531] env[61972]: created_port_ids = self._update_ports_for_instance( [ 727.234531] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.234531] env[61972]: with excutils.save_and_reraise_exception(): [ 727.234531] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.234531] env[61972]: self.force_reraise() [ 727.234531] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.234531] env[61972]: raise self.value [ 727.234531] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.234531] env[61972]: updated_port = self._update_port( [ 727.234531] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.234531] env[61972]: _ensure_no_port_binding_failure(port) [ 727.234531] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.234531] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 727.235457] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. [ 727.235457] env[61972]: Removing descriptor: 19 [ 727.235457] env[61972]: INFO nova.compute.manager [-] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Took 1.04 seconds to deallocate network for instance. 
[ 727.236685] env[61972]: DEBUG nova.compute.claims [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 727.236855] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.420341] env[61972]: DEBUG oslo_concurrency.lockutils [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] Releasing lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.420664] env[61972]: DEBUG nova.compute.manager [req-a6872587-f015-43ed-8a70-96efb54294a8 req-6274bb9f-6666-48b1-9f69-4c7aad438f63 service nova] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Received event network-vif-deleted-0739af50-d18a-4bb5-899a-198661218598 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 727.455121] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389100, 'name': Rename_Task, 'duration_secs': 0.133813} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.455389] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.456050] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9b211a3-3375-4146-a6dd-e78bd765c24f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.462410] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 727.462410] env[61972]: value = "task-1389101" [ 727.462410] env[61972]: _type = "Task" [ 727.462410] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.470300] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389101, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.665907] env[61972]: DEBUG nova.compute.utils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 727.669185] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 727.669356] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 727.718297] env[61972]: DEBUG nova.policy [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2e16b905bec4f7792956e0f764356d7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29b2ebba1b424f44ba4ee898fbfdc1d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 727.980358] env[61972]: DEBUG oslo_vmware.api [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389101, 'name': PowerOnVM_Task, 'duration_secs': 0.449657} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.980672] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 727.980925] env[61972]: INFO nova.compute.manager [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Took 8.83 seconds to spawn the instance on the hypervisor. 
[ 727.981178] env[61972]: DEBUG nova.compute.manager [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 727.982035] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eadc32c-67c3-4b49-947c-294cc71bd00b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.015971] env[61972]: DEBUG nova.compute.manager [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Received event network-changed-6992765b-0676-40cb-85a8-bd255585b49e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 728.017086] env[61972]: DEBUG nova.compute.manager [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Refreshing instance network info cache due to event network-changed-6992765b-0676-40cb-85a8-bd255585b49e. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 728.017347] env[61972]: DEBUG oslo_concurrency.lockutils [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] Acquiring lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.017541] env[61972]: DEBUG oslo_concurrency.lockutils [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] Acquired lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.017686] env[61972]: DEBUG nova.network.neutron [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Refreshing network info cache for port 6992765b-0676-40cb-85a8-bd255585b49e {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 728.128724] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Successfully created port: 396f16dd-e25c-4099-a353-68109855f1fe {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.170385] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 728.482733] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 728.483403] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 728.483629] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 728.483881] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.483951] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 728.484104] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.484245] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 728.484442] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 728.484591] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 728.484745] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Got 1 possible 
topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 728.484898] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 728.486435] env[61972]: DEBUG nova.virt.hardware [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.489747] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b88f4f-ee13-403b-90d1-513fa3250233 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.504996] env[61972]: INFO nova.compute.manager [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Took 28.60 seconds to build instance. [ 728.509442] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a3b864-7c5a-4c02-bcf1-2647bf60c9a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.526869] env[61972]: ERROR nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. 
[ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Traceback (most recent call last): [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] yield resources [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self.driver.spawn(context, instance, image_meta, [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] vm_ref = self.build_virtual_machine(instance, [ 728.526869] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] for vif in network_info: [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] return self._sync_wrapper(fn, *args, **kwargs) [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self.wait() [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self[:] = self._gt.wait() [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] return self._exit_event.wait() [ 728.527758] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 728.527758] env[61972]: ERROR 
nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] current.throw(*self._exc) [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] result = function(*args, **kwargs) [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] return func(*args, **kwargs) [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] raise e [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] nwinfo = self.network_api.allocate_for_instance( [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] created_port_ids = self._update_ports_for_instance( [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] with excutils.save_and_reraise_exception(): [ 728.528328] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self.force_reraise() [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] raise self.value [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] updated_port = self._update_port( [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] _ensure_no_port_binding_failure(port) [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] raise exception.PortBindingFailed(port_id=port['id']) [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] nova.exception.PortBindingFailed: Binding failed for port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. [ 728.528835] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] [ 728.528835] env[61972]: INFO nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Terminating instance [ 728.556269] env[61972]: DEBUG nova.network.neutron [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.594030] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1390dd-52ca-43cb-b158-7f6d63aaa24b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.602800] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9335c10e-0828-45d2-8e34-96d6d50781de {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.639164] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1934f0-2968-4294-9c7a-dca465327bd2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.649351] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8947fc3d-da7a-4b9b-bae7-017d88af8753 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.664656] env[61972]: DEBUG nova.compute.provider_tree [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.731948] env[61972]: DEBUG nova.network.neutron [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.017605] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3508e6b6-2b44-4b01-81b1-77b32a43d6d1 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.504s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.039453] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 
tempest-ServerActionsV293TestJSON-190518957-project-member] Acquiring lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.167802] env[61972]: DEBUG nova.scheduler.client.report [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 729.180505] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 729.215416] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 729.215646] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 729.215794] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 729.216251] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 729.216251] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 
tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 729.216251] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 729.216459] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 729.216617] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 729.216832] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 729.216921] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 729.217197] env[61972]: DEBUG nova.virt.hardware [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 729.218156] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e503e2a-44b4-41a1-85dc-1ffaae970108 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.226537] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2177a73-cb7d-41df-80e5-ba871b2e35d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.240377] env[61972]: DEBUG oslo_concurrency.lockutils [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] Releasing lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.240630] env[61972]: DEBUG nova.compute.manager [req-acb51855-ced9-4ff3-a8cb-7971a2d99153 req-0ece519a-338a-4ac3-a62f-53415767bd6e service nova] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Received event 
network-vif-deleted-6992765b-0676-40cb-85a8-bd255585b49e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 729.241194] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Acquired lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.241368] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.393617] env[61972]: ERROR nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. [ 729.393617] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 729.393617] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 729.393617] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 729.393617] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.393617] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 729.393617] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.393617] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 729.393617] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.393617] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 729.393617] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.393617] env[61972]: ERROR nova.compute.manager raise self.value [ 729.393617] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.393617] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 729.393617] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.393617] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 729.394248] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.394248] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 729.394248] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. 
[ 729.394248] env[61972]: ERROR nova.compute.manager [ 729.394248] env[61972]: Traceback (most recent call last): [ 729.394248] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 729.394248] env[61972]: listener.cb(fileno) [ 729.394248] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.394248] env[61972]: result = function(*args, **kwargs) [ 729.394248] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 729.394248] env[61972]: return func(*args, **kwargs) [ 729.394248] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 729.394248] env[61972]: raise e [ 729.394248] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 729.394248] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 729.394248] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.394248] env[61972]: created_port_ids = self._update_ports_for_instance( [ 729.394248] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.394248] env[61972]: with excutils.save_and_reraise_exception(): [ 729.394248] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.394248] env[61972]: self.force_reraise() [ 729.394248] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.394248] env[61972]: raise self.value [ 729.394248] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.394248] env[61972]: updated_port = self._update_port( [ 729.394248] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.394248] env[61972]: _ensure_no_port_binding_failure(port) [ 729.394248] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.394248] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 729.395470] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. [ 729.395470] env[61972]: Removing descriptor: 19 [ 729.395470] env[61972]: ERROR nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. 
[ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Traceback (most recent call last): [ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] yield resources [ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self.driver.spawn(context, instance, image_meta, [ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 729.395470] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] vm_ref = self.build_virtual_machine(instance, [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] vif_infos = vmwarevif.get_vif_info(self._session, [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] for vif in network_info: [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] return self._sync_wrapper(fn, *args, **kwargs) [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self.wait() [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self[:] = self._gt.wait() [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] return self._exit_event.wait() [ 729.395892] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 729.396324] env[61972]: ERROR 
nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] result = hub.switch() [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] return self.greenlet.switch() [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] result = function(*args, **kwargs) [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] return func(*args, **kwargs) [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] raise e [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] nwinfo = self.network_api.allocate_for_instance( [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.396324] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] created_port_ids = self._update_ports_for_instance( [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] with excutils.save_and_reraise_exception(): [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self.force_reraise() [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] raise self.value [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] updated_port = self._update_port( [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.396731] 
env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] _ensure_no_port_binding_failure(port) [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.396731] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] raise exception.PortBindingFailed(port_id=port['id']) [ 729.397257] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] nova.exception.PortBindingFailed: Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. [ 729.397257] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] [ 729.397257] env[61972]: INFO nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Terminating instance [ 729.503593] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.503768] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.521149] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 729.673114] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.673669] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 729.676199] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.536s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.811282] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.858835] env[61972]: INFO nova.compute.manager [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Rebuilding instance [ 729.902056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.902309] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquired lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.902542] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.910193] env[61972]: DEBUG nova.compute.manager [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 729.911327] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9191404-d180-4238-913d-03004b97f350 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.967286] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.008999] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.009185] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 730.009309] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Rebuilding the list of instances to heal {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 730.040204] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.063749] env[61972]: DEBUG nova.compute.manager [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Received event network-changed-396f16dd-e25c-4099-a353-68109855f1fe {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 730.063749] env[61972]: DEBUG nova.compute.manager [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Refreshing instance network info cache due to event network-changed-396f16dd-e25c-4099-a353-68109855f1fe. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 730.063749] env[61972]: DEBUG oslo_concurrency.lockutils [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] Acquiring lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.180965] env[61972]: DEBUG nova.compute.utils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 730.185678] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 730.185678] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.232629] env[61972]: DEBUG nova.policy [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2e16b905bec4f7792956e0f764356d7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29b2ebba1b424f44ba4ee898fbfdc1d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 730.432707] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.468690] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Releasing lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.469282] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 730.469606] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfa2571d-5fac-4c5a-bc9a-0b8e040c49cb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.482696] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c82744-694b-456e-bc74-a17ea9a29b82 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.515163] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 730.515434] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Skipping network cache update for instance because it is Building. 
{{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 730.515624] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 730.515810] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 730.516062] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 730.516260] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 730.516519] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "refresh_cache-9fd9fc35-7105-4941-8e05-cf4e45bb5d29" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.516703] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquired lock "refresh_cache-9fd9fc35-7105-4941-8e05-cf4e45bb5d29" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.516882] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Forcefully refreshing network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 730.517118] env[61972]: DEBUG nova.objects.instance [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lazy-loading 'info_cache' on Instance uuid 9fd9fc35-7105-4941-8e05-cf4e45bb5d29 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.519089] env[61972]: WARNING nova.virt.vmwareapi.driver [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance e0735ee2-0a9d-4291-8465-b644816bf8e3 could not be found. 
[ 730.520515] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 730.524577] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb184131-dfc1-4c44-bd56-dbc51713b9f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.535281] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bcd801-6c02-4c1e-8769-2a14196ea1cc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.567461] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e0735ee2-0a9d-4291-8465-b644816bf8e3 could not be found. [ 730.567707] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 730.567916] env[61972]: INFO nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Took 0.10 seconds to destroy the instance on the hypervisor. [ 730.571979] env[61972]: DEBUG oslo.service.loopingcall [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.571979] env[61972]: DEBUG nova.compute.manager [-] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 730.571979] env[61972]: DEBUG nova.network.neutron [-] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.572568] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81581658-a763-4181-bb3e-c87d05a367b1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.584237] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c57d81-9876-4705-a94b-45da3f9a0dfe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.615879] env[61972]: DEBUG nova.network.neutron [-] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.617648] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910f11fd-9e34-4f7f-ac7f-01c6d5659445 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.625857] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42866e36-db15-4023-92a6-ff1d0e868005 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.642650] env[61972]: DEBUG nova.compute.provider_tree [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.644797] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.686109] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 730.703949] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Successfully created port: 6bc96103-47f0-437c-b043-990f91968c0e {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.929288] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 730.929288] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9cd1086-5d30-4144-b2d5-9abd4608327b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.934880] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 730.934880] env[61972]: value = "task-1389102" [ 730.934880] env[61972]: _type = "Task" [ 730.934880] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.945422] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389102, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.121690] env[61972]: DEBUG nova.network.neutron [-] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.147285] env[61972]: DEBUG nova.scheduler.client.report [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 731.150873] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Releasing lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.151220] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 731.151412] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.152377] env[61972]: DEBUG oslo_concurrency.lockutils [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] Acquired lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.152596] env[61972]: DEBUG nova.network.neutron [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Refreshing network info cache for port 396f16dd-e25c-4099-a353-68109855f1fe {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 731.153662] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1abb833-90b7-4e81-bbc1-1d027912c214 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.162646] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfb04af-9c7d-4c8b-994f-a457583ad521 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.188115] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56488ac6-c94b-4b40-9cad-b0c36a3d293e could not be found. [ 731.188345] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.188523] env[61972]: INFO nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 731.188760] env[61972]: DEBUG oslo.service.loopingcall [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.188980] env[61972]: DEBUG nova.compute.manager [-] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 731.189088] env[61972]: DEBUG nova.network.neutron [-] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.209903] env[61972]: DEBUG nova.network.neutron [-] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.445356] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389102, 'name': PowerOffVM_Task, 'duration_secs': 0.119535} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.445618] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.445846] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.446594] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf54d671-5396-4156-8442-7652e6e1a011 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.453183] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.453349] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b59ff1d-2ccf-43c3-bfd5-10d5d2558545 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.476394] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.476599] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.476779] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleting the datastore file [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.477029] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a223e8b6-8af8-4d98-ad2d-0daa0b01afc1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.482611] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 731.482611] env[61972]: value = "task-1389104" [ 731.482611] env[61972]: _type = "Task" [ 731.482611] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.490724] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389104, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.549242] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.627999] env[61972]: INFO nova.compute.manager [-] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Took 1.06 seconds to deallocate network for instance. [ 731.656048] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.977s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.656048] env[61972]: ERROR nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. 
[ 731.656048] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Traceback (most recent call last): [ 731.656048] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 731.656048] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self.driver.spawn(context, instance, image_meta, [ 731.656048] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 731.656048] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self._vmops.spawn(context, instance, image_meta, injected_files, [ 731.656048] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 731.656048] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] vm_ref = self.build_virtual_machine(instance, [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] vif_infos = vmwarevif.get_vif_info(self._session, [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] for vif in network_info: [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] return self._sync_wrapper(fn, *args, **kwargs) [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self.wait() [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self[:] = self._gt.wait() [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] return self._exit_event.wait() [ 731.656736] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] result = hub.switch() [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] return self.greenlet.switch() [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] result = function(*args, **kwargs) [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] return func(*args, **kwargs) [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] raise e [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] nwinfo = self.network_api.allocate_for_instance( [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.657163] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] created_port_ids = self._update_ports_for_instance( [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] with excutils.save_and_reraise_exception(): [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] self.force_reraise() [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] raise self.value [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] updated_port = self._update_port( [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] _ensure_no_port_binding_failure(port) [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 731.657559] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] raise exception.PortBindingFailed(port_id=port['id']) [ 731.657930] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] nova.exception.PortBindingFailed: Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. [ 731.657930] env[61972]: ERROR nova.compute.manager [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] [ 731.657930] env[61972]: DEBUG nova.compute.utils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 731.661546] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.719s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.661546] env[61972]: INFO nova.compute.claims [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.666814] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Build of instance 22634f52-c696-417b-bfe9-0a7ca62aad40 was re-scheduled: Binding failed for port 5afa0c7c-d65e-4136-a05f-576cf397cf44, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 731.668314] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 731.668314] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.668314] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.668314] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.695974] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 731.713057] env[61972]: DEBUG nova.network.neutron [-] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.723282] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.723859] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.724185] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.724524] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.725094] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.725374] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.728018] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.728018] env[61972]: DEBUG 
nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.728018] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.728018] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.728018] env[61972]: DEBUG nova.virt.hardware [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.728243] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ed0c17-4aa3-427e-b85f-eee1c0eb4dfc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.736409] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e778d364-c8c1-4686-8544-a470754c8c61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.825613] env[61972]: DEBUG nova.network.neutron [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.986654] env[61972]: DEBUG nova.network.neutron [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.000406] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389104, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108792} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.001255] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.001443] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 732.001614] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.103773] env[61972]: ERROR nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. [ 732.103773] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 732.103773] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 732.103773] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 732.103773] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 732.103773] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 732.103773] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 732.103773] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 732.103773] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.103773] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 732.103773] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.103773] env[61972]: ERROR nova.compute.manager raise self.value [ 732.103773] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 732.103773] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 732.103773] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.103773] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 732.104269] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 732.104269] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 732.104269] env[61972]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. [ 732.104269] env[61972]: ERROR nova.compute.manager [ 732.104269] env[61972]: Traceback (most recent call last): [ 732.104269] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 732.104269] env[61972]: listener.cb(fileno) [ 732.104269] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 732.104269] env[61972]: result = function(*args, **kwargs) [ 732.104269] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 732.104269] env[61972]: return func(*args, **kwargs) [ 732.104269] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 732.104269] env[61972]: raise e [ 732.104269] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 732.104269] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 732.104269] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 732.104269] env[61972]: created_port_ids = self._update_ports_for_instance( [ 732.104269] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 732.104269] env[61972]: with excutils.save_and_reraise_exception(): [ 732.104269] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.104269] env[61972]: self.force_reraise() [ 732.104269] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.104269] env[61972]: raise self.value [ 732.104269] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 732.104269] env[61972]: updated_port = self._update_port( [ 732.104269] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.104269] env[61972]: _ensure_no_port_binding_failure(port) [ 732.104269] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 732.104269] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 732.105098] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. [ 732.105098] env[61972]: Removing descriptor: 19 [ 732.105098] env[61972]: ERROR nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. 
[ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Traceback (most recent call last): [ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] yield resources [ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self.driver.spawn(context, instance, image_meta, [ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 732.105098] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] vm_ref = self.build_virtual_machine(instance, [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] vif_infos = vmwarevif.get_vif_info(self._session, [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] for vif in network_info: [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] return self._sync_wrapper(fn, *args, **kwargs) [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self.wait() [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self[:] = self._gt.wait() [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] return self._exit_event.wait() [ 732.105447] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 732.105808] env[61972]: ERROR 
nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] result = hub.switch() [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] return self.greenlet.switch() [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] result = function(*args, **kwargs) [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] return func(*args, **kwargs) [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] raise e [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] nwinfo = self.network_api.allocate_for_instance( [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 732.105808] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] created_port_ids = self._update_ports_for_instance( [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] with excutils.save_and_reraise_exception(): [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self.force_reraise() [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] raise self.value [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] updated_port = self._update_port( [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.106189] 
env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] _ensure_no_port_binding_failure(port) [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 732.106189] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] raise exception.PortBindingFailed(port_id=port['id']) [ 732.106579] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] nova.exception.PortBindingFailed: Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. [ 732.106579] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] [ 732.106579] env[61972]: INFO nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Terminating instance [ 732.151848] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.191029] env[61972]: INFO nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Took 0.56 seconds to detach 1 volumes for instance. [ 732.193772] env[61972]: DEBUG nova.compute.claims [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 732.194104] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.207164] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.219777] env[61972]: INFO nova.compute.manager [-] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Took 1.03 seconds to deallocate network for instance. 
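The PortBindingFailed errors above (ports 5afa0c7c-... and 6bc96103-...) all originate from the same check in nova's neutron API layer, per the traceback frames at nova/network/neutron.py line 294, _ensure_no_port_binding_failure. A minimal sketch of that check, reconstructed from the frames shown here rather than from the actual nova source, with a stand-in exception class so it runs on its own:

# Hedged sketch reconstructed from the traceback frames above; the real
# nova source may differ in detail. Names mirror the log only.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed; message mirrors the log.
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding by setting binding:vif_type to
    # 'binding_failed' on the port; nova turns that into PortBindingFailed,
    # which then propagates up through _update_port and
    # allocate_for_instance as seen in the tracebacks above.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port dict shaped like the one behind the errors above.
port = {'id': '5afa0c7c-d65e-4136-a05f-576cf397cf44',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)

When the check fires during _build_and_run_instance, the compute manager aborts the resource claim, deallocates networking, and re-schedules the build, which is exactly the sequence of records surrounding these tracebacks.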
[ 732.221151] env[61972]: DEBUG nova.compute.claims [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 732.221981] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.235214] env[61972]: DEBUG nova.compute.manager [req-9a3e57c4-8495-4ae0-8501-60f75160bb23 req-8eabc567-eebd-46f4-8438-9e37f9909dc3 service nova] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Received event network-changed-6bc96103-47f0-437c-b043-990f91968c0e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 732.235415] env[61972]: DEBUG nova.compute.manager [req-9a3e57c4-8495-4ae0-8501-60f75160bb23 req-8eabc567-eebd-46f4-8438-9e37f9909dc3 service nova] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Refreshing instance network info cache due to event network-changed-6bc96103-47f0-437c-b043-990f91968c0e. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 732.235705] env[61972]: DEBUG oslo_concurrency.lockutils [req-9a3e57c4-8495-4ae0-8501-60f75160bb23 req-8eabc567-eebd-46f4-8438-9e37f9909dc3 service nova] Acquiring lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.235859] env[61972]: DEBUG oslo_concurrency.lockutils [req-9a3e57c4-8495-4ae0-8501-60f75160bb23 req-8eabc567-eebd-46f4-8438-9e37f9909dc3 service nova] Acquired lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.236033] env[61972]: DEBUG nova.network.neutron [req-9a3e57c4-8495-4ae0-8501-60f75160bb23 req-8eabc567-eebd-46f4-8438-9e37f9909dc3 service nova] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Refreshing network info cache for port 6bc96103-47f0-437c-b043-990f91968c0e {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 732.303182] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.492732] env[61972]: DEBUG oslo_concurrency.lockutils [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] Releasing lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.492732] env[61972]: DEBUG nova.compute.manager [req-3261a8aa-e4a0-4c18-bec1-ece90c04c7be req-048cb4cb-2afe-4beb-aaf3-7a2fbb06933b service nova] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Received event network-vif-deleted-396f16dd-e25c-4099-a353-68109855f1fe {{(pid=61972) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 732.609876] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.658586] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Releasing lock "refresh_cache-9fd9fc35-7105-4941-8e05-cf4e45bb5d29" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.658793] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Updated the network info_cache for instance {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 732.659015] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.659183] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.659414] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.659615] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.659884] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.660020] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.660175] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 732.660325] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.757265] env[61972]: DEBUG nova.network.neutron [req-9a3e57c4-8495-4ae0-8501-60f75160bb23 req-8eabc567-eebd-46f4-8438-9e37f9909dc3 service nova] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.805928] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "refresh_cache-22634f52-c696-417b-bfe9-0a7ca62aad40" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.806227] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 732.806445] env[61972]: DEBUG nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 732.806637] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 732.826500] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.852721] env[61972]: DEBUG nova.network.neutron [req-9a3e57c4-8495-4ae0-8501-60f75160bb23 req-8eabc567-eebd-46f4-8438-9e37f9909dc3 service nova] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.988409] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dae5ac2-e677-4d49-bbe2-f6c0c31dc661 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.996094] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce80b7f-dedd-4ff2-a53e-2a23e23e96bb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.028205] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51781586-5c41-4935-b3f5-87b4ad7c58b2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.035672] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d8d0c7-68ac-4348-8ebf-1825be876421 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.051529] env[61972]: DEBUG nova.compute.provider_tree [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.058460] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 733.058694] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 733.058846] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 733.059042] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 733.059192] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.059330] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 733.059535] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 733.059690] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 733.059849] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 733.060010] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 733.060189] env[61972]: DEBUG nova.virt.hardware [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 733.060943] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af764df6-eced-49d2-879e-b79b925201ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.068197] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73ce708-af0d-4904-b8fd-9edd7af8f332 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.081237] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance VIF info [] {{(pid=61972) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 733.086702] env[61972]: DEBUG oslo.service.loopingcall [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 733.086926] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 733.087130] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c4070e1-ade6-4ce1-8ca2-1b4087d1f26d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.103336] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 733.103336] env[61972]: value = "task-1389105" [ 733.103336] env[61972]: _type = "Task" [ 733.103336] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.113982] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389105, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.163608] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.328911] env[61972]: DEBUG nova.network.neutron [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.354752] env[61972]: DEBUG oslo_concurrency.lockutils [req-9a3e57c4-8495-4ae0-8501-60f75160bb23 req-8eabc567-eebd-46f4-8438-9e37f9909dc3 service nova] Releasing lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.355014] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquired lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.355214] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.555034] env[61972]: DEBUG nova.scheduler.client.report [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Inventory has not changed for provider 
2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 733.613014] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389105, 'name': CreateVM_Task, 'duration_secs': 0.254288} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.613293] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 733.613735] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.613891] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.614216] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 733.614448] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-866342ce-d1c6-4040-8c37-bb2e66c5bddf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.618662] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 733.618662] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d8a61e-1681-9404-64cc-34180a76991d" [ 733.618662] env[61972]: _type = "Task" [ 733.618662] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.626387] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d8a61e-1681-9404-64cc-34180a76991d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.831148] env[61972]: INFO nova.compute.manager [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 22634f52-c696-417b-bfe9-0a7ca62aad40] Took 1.02 seconds to deallocate network for instance. [ 733.873030] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.965574] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.060064] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.060575] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 734.062998] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.747s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.129014] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d8a61e-1681-9404-64cc-34180a76991d, 'name': SearchDatastore_Task, 'duration_secs': 0.008976} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.130021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.130021] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.130021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.130021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.130342] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.130382] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18760202-571f-4c87-87f9-90cbfcd5a4f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.138147] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.138318] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 734.138989] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69fdd01c-c2af-46fd-8d1c-581c7cd4c822 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.143674] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 734.143674] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5242fedd-dfd9-6f49-0ed7-e513aa11cae6" [ 734.143674] env[61972]: _type = "Task" [ 734.143674] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.152102] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5242fedd-dfd9-6f49-0ed7-e513aa11cae6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.274859] env[61972]: DEBUG nova.compute.manager [req-ed019424-d440-4f1b-84ba-a3896f0b5c2e req-15003cd3-3cdf-459f-9103-0bd7ed1593f5 service nova] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Received event network-vif-deleted-6bc96103-47f0-437c-b043-990f91968c0e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 734.468368] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Releasing lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.468746] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 734.468936] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 734.469296] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba502c1d-fc30-4d2c-98cf-b2c7c4afd0b6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.478419] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8308b554-0e6f-4d7f-ae37-fcff3ef40abe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.501082] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2725d6ed-89d9-479f-b6ee-d16523e0abab could not be found. [ 734.501311] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 734.501485] env[61972]: INFO nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Took 0.03 seconds to destroy the instance on the hypervisor. [ 734.501726] env[61972]: DEBUG oslo.service.loopingcall [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.501988] env[61972]: DEBUG nova.compute.manager [-] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 734.502101] env[61972]: DEBUG nova.network.neutron [-] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.520818] env[61972]: DEBUG nova.network.neutron [-] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Instance cache missing network info.
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.568165] env[61972]: DEBUG nova.compute.utils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 734.572982] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 734.573225] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 734.630038] env[61972]: DEBUG nova.policy [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cead1a9886b40aebdd867a47a303bbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e125bb8c48284c46ab87edd0604a6578', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 734.653985] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5242fedd-dfd9-6f49-0ed7-e513aa11cae6, 'name': SearchDatastore_Task, 'duration_secs': 0.009213} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.657016] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-075343ab-76f8-42f7-abc3-a9174bdef562 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.661976] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 734.661976] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52634c95-dafb-f739-0b8a-abe26f424438" [ 734.661976] env[61972]: _type = "Task" [ 734.661976] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.670974] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52634c95-dafb-f739-0b8a-abe26f424438, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.858881] env[61972]: INFO nova.scheduler.client.report [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Deleted allocations for instance 22634f52-c696-417b-bfe9-0a7ca62aad40 [ 734.877593] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3073dbb0-621e-43d8-99fa-27d3a990f22c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.885694] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d68bd5b-d439-4f77-b5d0-1b116601e9bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.917169] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e86ec7-1877-44ef-ac46-13fcde275187 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.924942] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc62c095-20c4-4365-bad8-43121b0f458a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.938435] env[61972]: DEBUG nova.compute.provider_tree [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.980889] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Successfully created port: 1a703979-d671-41f7-845a-aa6e43246c2c {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 735.023151] env[61972]: DEBUG nova.network.neutron [-] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.073647] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 735.174499] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52634c95-dafb-f739-0b8a-abe26f424438, 'name': SearchDatastore_Task, 'duration_secs': 0.00926} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.174794] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.175068] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 735.175319] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c83a990e-3e99-4b00-b942-1f918854d1e1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.182111] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 735.182111] env[61972]: value = "task-1389106" [ 735.182111] env[61972]: _type = "Task" [ 735.182111] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.190194] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.369894] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43abb3bf-cb8c-4846-9054-d6caa7c59d41 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "22634f52-c696-417b-bfe9-0a7ca62aad40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 147.038s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.442341] env[61972]: DEBUG nova.scheduler.client.report [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 735.532764] env[61972]: INFO nova.compute.manager [-] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Took 1.03 seconds to deallocate network for instance.
[ 735.534107] env[61972]: DEBUG nova.compute.claims [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 735.534575] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.694302] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389106, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478791} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.694916] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 735.697531] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 735.697531] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74aacb80-1a3f-468b-8741-10c78da2c1fc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.704021] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 735.704021] env[61972]: value = "task-1389107" [ 735.704021] env[61972]: _type = "Task" [ 735.704021] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.712817] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389107, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.872875] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 735.952580] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.889s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.954862] env[61972]: ERROR nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Traceback (most recent call last): [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self.driver.spawn(context, instance, image_meta, [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] vm_ref = self.build_virtual_machine(instance, [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] vif_infos = vmwarevif.get_vif_info(self._session, [ 735.954862] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] for vif in network_info: [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] return self._sync_wrapper(fn, *args, **kwargs) [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self.wait() [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 735.955274] env[61972]: ERROR 
nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self[:] = self._gt.wait() [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] return self._exit_event.wait() [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] result = hub.switch() [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 735.955274] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] return self.greenlet.switch() [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] result = function(*args, **kwargs) [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] return func(*args, **kwargs) [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] raise e [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] nwinfo = self.network_api.allocate_for_instance( [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] created_port_ids = self._update_ports_for_instance( [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] with excutils.save_and_reraise_exception(): [ 735.955621] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] self.force_reraise() [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] raise self.value [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] updated_port = self._update_port( [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] _ensure_no_port_binding_failure(port) [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] raise exception.PortBindingFailed(port_id=port['id']) [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] nova.exception.PortBindingFailed: Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. [ 735.956148] env[61972]: ERROR nova.compute.manager [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] [ 735.956449] env[61972]: DEBUG nova.compute.utils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 735.956449] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.493s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.958281] env[61972]: INFO nova.compute.claims [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.960932] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Build of instance 2ba9f652-c274-4d79-84a2-ad1384c99b91 was re-scheduled: Binding failed for port 886b147b-8c50-447e-91a7-17d70f5bc53e, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 735.961371] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 735.961594] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquiring lock "refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.961737] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Acquired lock "refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.962297] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.091879] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 736.119496] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=<?>,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T12:07:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 736.119733] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 736.119884] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.120106] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 736.120273] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.120436] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 736.120699] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 736.120863] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 736.121082] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b
tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 736.121200] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 736.121368] env[61972]: DEBUG nova.virt.hardware [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 736.122260] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7062751-d0b3-4112-a415-4f4f599e702b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.131401] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ebdef3-a887-4da9-9202-ebb3275db948 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.210901] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389107, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067328} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.211181] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 736.211933] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7cd78a-c390-410c-b91a-3ad331a04e29 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.231808] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 736.231808] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ab4b453-9567-41fd-8f9d-62fde8eec5cb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.251938] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 736.251938] env[61972]: value = "task-1389108" [ 736.251938] 
env[61972]: _type = "Task" [ 736.251938] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.260399] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389108, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.271168] env[61972]: ERROR nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. [ 736.271168] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 736.271168] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 736.271168] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 736.271168] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.271168] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 736.271168] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.271168] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 736.271168] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.271168] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 736.271168] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.271168] env[61972]: ERROR nova.compute.manager raise self.value [ 736.271168] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.271168] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 736.271168] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.271168] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 736.271761] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.271761] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 736.271761] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. 
[ 736.271761] env[61972]: ERROR nova.compute.manager [ 736.271761] env[61972]: Traceback (most recent call last): [ 736.271761] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 736.271761] env[61972]: listener.cb(fileno) [ 736.271761] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.271761] env[61972]: result = function(*args, **kwargs) [ 736.271761] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 736.271761] env[61972]: return func(*args, **kwargs) [ 736.271761] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 736.271761] env[61972]: raise e [ 736.271761] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 736.271761] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 736.271761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.271761] env[61972]: created_port_ids = self._update_ports_for_instance( [ 736.271761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.271761] env[61972]: with excutils.save_and_reraise_exception(): [ 736.271761] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.271761] env[61972]: self.force_reraise() [ 736.271761] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.271761] env[61972]: raise self.value [ 736.271761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.271761] env[61972]: updated_port = self._update_port( [ 736.271761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.271761] env[61972]: _ensure_no_port_binding_failure(port) [ 736.271761] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.271761] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 736.272746] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. [ 736.272746] env[61972]: Removing descriptor: 21 [ 736.272746] env[61972]: ERROR nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. 
[ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Traceback (most recent call last): [ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] yield resources [ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self.driver.spawn(context, instance, image_meta, [ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 736.272746] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] vm_ref = self.build_virtual_machine(instance, [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] vif_infos = vmwarevif.get_vif_info(self._session, [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] for vif in network_info: [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] return self._sync_wrapper(fn, *args, **kwargs) [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self.wait() [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self[:] = self._gt.wait() [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] return self._exit_event.wait() [ 736.273149] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 736.273563] env[61972]: ERROR 
nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] result = hub.switch() [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] return self.greenlet.switch() [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] result = function(*args, **kwargs) [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] return func(*args, **kwargs) [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] raise e [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] nwinfo = self.network_api.allocate_for_instance( [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.273563] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] created_port_ids = self._update_ports_for_instance( [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] with excutils.save_and_reraise_exception(): [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self.force_reraise() [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] raise self.value [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] updated_port = self._update_port( [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.273969] 
env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] _ensure_no_port_binding_failure(port) [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.273969] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] raise exception.PortBindingFailed(port_id=port['id']) [ 736.274386] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] nova.exception.PortBindingFailed: Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. [ 736.274386] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] [ 736.274386] env[61972]: INFO nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Terminating instance [ 736.329343] env[61972]: DEBUG nova.compute.manager [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Received event network-changed-1a703979-d671-41f7-845a-aa6e43246c2c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 736.329343] env[61972]: DEBUG nova.compute.manager [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Refreshing instance network info cache due to event network-changed-1a703979-d671-41f7-845a-aa6e43246c2c. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 736.329343] env[61972]: DEBUG oslo_concurrency.lockutils [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] Acquiring lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.329473] env[61972]: DEBUG oslo_concurrency.lockutils [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] Acquired lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.330139] env[61972]: DEBUG nova.network.neutron [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Refreshing network info cache for port 1a703979-d671-41f7-845a-aa6e43246c2c {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 736.396679] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.492326] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.634880] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.762568] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389108, 'name': ReconfigVM_Task, 'duration_secs': 0.320116} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.762864] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.763518] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2ac5ec1-25bb-4954-b1f5-c07b45394be3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.770387] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 736.770387] env[61972]: value = "task-1389109" [ 736.770387] env[61972]: _type = "Task" [ 736.770387] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.780199] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389109, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.783745] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Acquiring lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.866279] env[61972]: DEBUG nova.network.neutron [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.925285] env[61972]: DEBUG nova.network.neutron [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.139540] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Releasing lock "refresh_cache-2ba9f652-c274-4d79-84a2-ad1384c99b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.139540] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 737.139540] env[61972]: DEBUG nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 737.139540] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 737.153199] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.280199] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389109, 'name': Rename_Task, 'duration_secs': 0.143265} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.282576] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 737.283048] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b95e0a8-16f5-4a16-b3cd-c2e389bc0277 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.289456] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 737.289456] env[61972]: value = "task-1389110" [ 737.289456] env[61972]: _type = "Task" [ 737.289456] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.293731] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a75f7ee-e500-4650-89b6-0f215edb5904 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.301562] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389110, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.304322] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f087658-48d3-4e7d-882b-b4ae2d5a301e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.336901] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a56b9f4-82c2-486a-aeb6-bceb95f55e13 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.344409] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d821ece-a9f8-4628-a7bc-dfaf110ead5b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.357794] env[61972]: DEBUG nova.compute.provider_tree [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.407382] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "89cbc6ec-7546-443c-9abb-47940d223daa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.407627] env[61972]: DEBUG oslo_concurrency.lockutils [None 
req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "89cbc6ec-7546-443c-9abb-47940d223daa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.429017] env[61972]: DEBUG oslo_concurrency.lockutils [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] Releasing lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.429017] env[61972]: DEBUG nova.compute.manager [req-0e82d76d-0e67-4b15-8de4-f6f2ca5fb22c req-adcde1af-65e9-4447-bf7f-9b532033a9f4 service nova] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Received event network-vif-deleted-1a703979-d671-41f7-845a-aa6e43246c2c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 737.430409] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Acquired lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.430740] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.432213] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "caad50a8-e0ad-4ca9-b391-691ead1756f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.432585] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "caad50a8-e0ad-4ca9-b391-691ead1756f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.656807] env[61972]: DEBUG nova.network.neutron [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.801041] env[61972]: DEBUG oslo_vmware.api [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389110, 'name': PowerOnVM_Task, 'duration_secs': 0.40725} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.801326] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 737.801530] env[61972]: DEBUG nova.compute.manager [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 737.802324] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e68b775-ebf8-4640-b301-c7a56885f5f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.864844] env[61972]: DEBUG nova.scheduler.client.report [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 737.948391] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.038566] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.159372] env[61972]: INFO nova.compute.manager [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] [instance: 2ba9f652-c274-4d79-84a2-ad1384c99b91] Took 1.02 seconds to deallocate network for instance. 
[ 738.325283] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.366996] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.367650] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 738.370497] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.097s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.541810] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Releasing lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.542379] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 738.542595] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.542924] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ff0665e-570d-4b58-a17d-e665d1d802ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.564823] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc6838b-2190-4f8c-bb9d-52ff453c17c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.590458] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b0039dd-1219-465d-beb8-0262e0e40029 could not be found. [ 738.590696] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 738.590867] env[61972]: INFO nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Took 0.05 seconds to destroy the instance on the hypervisor. [ 738.591115] env[61972]: DEBUG oslo.service.loopingcall [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.591324] env[61972]: DEBUG nova.compute.manager [-] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 738.591420] env[61972]: DEBUG nova.network.neutron [-] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 738.604439] env[61972]: DEBUG nova.network.neutron [-] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.877021] env[61972]: DEBUG nova.compute.utils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 738.879757] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 738.880076] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.943144] env[61972]: DEBUG nova.policy [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86d64c39e69143c68bc1141204de79bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2504e08ff42347dba127da83a8c0fc3a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 739.062841] env[61972]: INFO nova.compute.manager [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Rebuilding instance [ 739.106968] env[61972]: DEBUG nova.network.neutron [-] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.110462] env[61972]: DEBUG nova.compute.manager [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 739.111329] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413baed9-668f-4c23-85ae-c0daf391c005 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.201427] env[61972]: INFO nova.scheduler.client.report [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Deleted allocations for instance 2ba9f652-c274-4d79-84a2-ad1384c99b91 [ 739.309144] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ceb30fe-68ed-4308-b747-cc50a5268b59 {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.317693] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165759ed-1978-4875-be66-c1608e50338e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.351826] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Successfully created port: 99bf816e-e60d-4917-87ee-03be0502915b {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.354196] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b76d60-5a5e-458b-bcad-7ffc41937d3a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.362786] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77938797-f66e-4898-9d41-453022bdc698 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.377027] env[61972]: DEBUG nova.compute.provider_tree [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.383641] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 739.617575] env[61972]: INFO nova.compute.manager [-] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Took 1.03 seconds to deallocate network for instance. 
[ 739.621236] env[61972]: DEBUG nova.compute.claims [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 739.621787] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.712126] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0af20c12-d4b0-461f-9229-ded711c238ce tempest-VolumesAdminNegativeTest-509776245 tempest-VolumesAdminNegativeTest-509776245-project-member] Lock "2ba9f652-c274-4d79-84a2-ad1384c99b91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.265s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.880017] env[61972]: DEBUG nova.scheduler.client.report [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 740.132740] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 740.134263] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4946cde5-4c88-4c06-bfe6-175fe815ac52 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.141782] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 740.141782] env[61972]: value = "task-1389111" [ 740.141782] env[61972]: _type = "Task" [ 740.141782] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.153728] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389111, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.215436] env[61972]: DEBUG nova.compute.manager [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 740.385549] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.015s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.386242] env[61972]: ERROR nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Traceback (most recent call last): [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self.driver.spawn(context, instance, image_meta, [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] vm_ref = self.build_virtual_machine(instance, [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.386242] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] for vif in network_info: [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] return self._sync_wrapper(fn, *args, **kwargs) [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self.wait() [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self[:] = self._gt.wait() [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] return self._exit_event.wait() [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] result = hub.switch() [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 740.386700] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] return self.greenlet.switch() [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] result = function(*args, **kwargs) [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] return func(*args, **kwargs) [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] raise e [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] nwinfo = self.network_api.allocate_for_instance( [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] created_port_ids = self._update_ports_for_instance( [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] with excutils.save_and_reraise_exception(): [ 740.387100] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] self.force_reraise() [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] raise self.value [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] updated_port = self._update_port( [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] _ensure_no_port_binding_failure(port) [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] raise exception.PortBindingFailed(port_id=port['id']) [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] nova.exception.PortBindingFailed: Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. [ 740.387530] env[61972]: ERROR nova.compute.manager [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] [ 740.387838] env[61972]: DEBUG nova.compute.utils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 740.389010] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.152s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.392617] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Build of instance e0d51c99-1916-4d66-a141-dfa5d4357174 was re-scheduled: Binding failed for port c2f9a5cd-c75e-4a15-b45d-819681846033, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 740.393105] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 740.393361] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Acquiring lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.393489] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Acquired lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.393644] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.399394] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 740.420845] env[61972]: DEBUG nova.compute.manager [req-b817b770-3b43-4428-8f16-d4caa568f37a req-8fdaa65c-a526-4091-971e-b1b742c443d3 service nova] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Received event network-changed-99bf816e-e60d-4917-87ee-03be0502915b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 740.421214] env[61972]: DEBUG nova.compute.manager [req-b817b770-3b43-4428-8f16-d4caa568f37a req-8fdaa65c-a526-4091-971e-b1b742c443d3 service nova] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Refreshing instance network info cache due to event network-changed-99bf816e-e60d-4917-87ee-03be0502915b. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 740.421319] env[61972]: DEBUG oslo_concurrency.lockutils [req-b817b770-3b43-4428-8f16-d4caa568f37a req-8fdaa65c-a526-4091-971e-b1b742c443d3 service nova] Acquiring lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.421402] env[61972]: DEBUG oslo_concurrency.lockutils [req-b817b770-3b43-4428-8f16-d4caa568f37a req-8fdaa65c-a526-4091-971e-b1b742c443d3 service nova] Acquired lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.421578] env[61972]: DEBUG nova.network.neutron [req-b817b770-3b43-4428-8f16-d4caa568f37a req-8fdaa65c-a526-4091-971e-b1b742c443d3 service nova] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Refreshing network info cache for port 99bf816e-e60d-4917-87ee-03be0502915b {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 740.440864] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:11:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='321868771',id=33,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-166850163',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 740.441125] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 740.441281] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.441460] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 740.441605] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.441748] 
env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 740.441952] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 740.442663] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 740.443261] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 740.443551] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 740.444125] env[61972]: DEBUG nova.virt.hardware [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 740.446329] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd6ec32-d85d-4397-8f1b-366566f6a179 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.455739] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0327318-b465-47af-a32f-48c3b9f923c0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.582403] env[61972]: ERROR nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. 
[ 740.582403] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 740.582403] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.582403] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 740.582403] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.582403] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 740.582403] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.582403] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 740.582403] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.582403] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 740.582403] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.582403] env[61972]: ERROR nova.compute.manager raise self.value [ 740.582403] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.582403] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 740.582403] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.582403] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 740.583030] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.583030] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 740.583030] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. 
[ 740.583030] env[61972]: ERROR nova.compute.manager [ 740.583030] env[61972]: Traceback (most recent call last): [ 740.583030] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 740.583030] env[61972]: listener.cb(fileno) [ 740.583030] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.583030] env[61972]: result = function(*args, **kwargs) [ 740.583030] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.583030] env[61972]: return func(*args, **kwargs) [ 740.583030] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 740.583030] env[61972]: raise e [ 740.583030] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.583030] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 740.583030] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.583030] env[61972]: created_port_ids = self._update_ports_for_instance( [ 740.583030] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.583030] env[61972]: with excutils.save_and_reraise_exception(): [ 740.583030] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.583030] env[61972]: self.force_reraise() [ 740.583030] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.583030] env[61972]: raise self.value [ 740.583030] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.583030] env[61972]: updated_port = self._update_port( [ 740.583030] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.583030] env[61972]: _ensure_no_port_binding_failure(port) [ 740.583030] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.583030] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 740.583967] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. [ 740.583967] env[61972]: Removing descriptor: 21 [ 740.583967] env[61972]: ERROR nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. 
[ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Traceback (most recent call last): [ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] yield resources [ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self.driver.spawn(context, instance, image_meta, [ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.583967] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] vm_ref = self.build_virtual_machine(instance, [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] for vif in network_info: [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] return self._sync_wrapper(fn, *args, **kwargs) [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self.wait() [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self[:] = self._gt.wait() [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] return self._exit_event.wait() [ 740.584373] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 740.584793] env[61972]: ERROR 
nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] result = hub.switch() [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] return self.greenlet.switch() [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] result = function(*args, **kwargs) [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] return func(*args, **kwargs) [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] raise e [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] nwinfo = self.network_api.allocate_for_instance( [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.584793] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] created_port_ids = self._update_ports_for_instance( [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] with excutils.save_and_reraise_exception(): [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self.force_reraise() [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] raise self.value [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] updated_port = self._update_port( [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.585217] 
env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] _ensure_no_port_binding_failure(port) [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.585217] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] raise exception.PortBindingFailed(port_id=port['id']) [ 740.585621] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] nova.exception.PortBindingFailed: Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. [ 740.585621] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] [ 740.585621] env[61972]: INFO nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Terminating instance [ 740.651929] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389111, 'name': PowerOffVM_Task, 'duration_secs': 0.218219} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.652268] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 740.652602] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.653387] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a11bc6-1ff5-4dd6-9c8b-bd9d7544c0ad {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.660887] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 740.661119] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71e7128a-0a34-415d-88d9-9b8775670d7b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.689465] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 740.689465] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d 
tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 740.689465] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Deleting the datastore file [datastore2] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.689465] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef0f0168-987c-448d-a51a-f66a3ea91ee9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.693749] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 740.693749] env[61972]: value = "task-1389113" [ 740.693749] env[61972]: _type = "Task" [ 740.693749] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.701692] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.748173] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.916820] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.950587] env[61972]: DEBUG nova.network.neutron [req-b817b770-3b43-4428-8f16-d4caa568f37a req-8fdaa65c-a526-4091-971e-b1b742c443d3 service nova] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.087724] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Acquiring lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.103215] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.108033] env[61972]: DEBUG nova.network.neutron [req-b817b770-3b43-4428-8f16-d4caa568f37a req-8fdaa65c-a526-4091-971e-b1b742c443d3 service nova] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.204308] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123935} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.204308] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.204308] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 741.205158] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.340613] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19caa29c-4663-442d-b444-f1878c66128a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.349363] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc6748b-250d-4bb7-86dd-f9355dd03591 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.379038] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d399d459-6d86-4c8b-99ce-f617868a8e01 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.386928] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1605e0e-9aba-49d1-b8d6-7eac6cad5fe9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.401601] env[61972]: DEBUG nova.compute.provider_tree [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.609127] env[61972]: DEBUG oslo_concurrency.lockutils [req-b817b770-3b43-4428-8f16-d4caa568f37a req-8fdaa65c-a526-4091-971e-b1b742c443d3 service nova] Releasing lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.609127] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Releasing lock "refresh_cache-e0d51c99-1916-4d66-a141-dfa5d4357174" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.609601] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 741.609601] env[61972]: DEBUG nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 741.609770] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.611587] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Acquired lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.611700] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.627956] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.905404] env[61972]: DEBUG nova.scheduler.client.report [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 742.130239] env[61972]: DEBUG nova.network.neutron [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.143721] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.243803] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.256633] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 742.256858] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 742.257017] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 
tempest-ServersAdmin275Test-33558719-project-admin] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.257236] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 742.257378] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.257519] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 742.257722] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 742.257875] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 742.258033] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 742.258196] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 742.258366] env[61972]: DEBUG nova.virt.hardware [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 742.261078] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c4e127-2962-4979-93c8-581639e1e2ac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.267920] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfe8620-3c3d-4e7d-92b3-428850f68818 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.283953] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] 
[instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.290184] env[61972]: DEBUG oslo.service.loopingcall [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.290488] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 742.290730] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-355748dd-2eb9-4831-80d7-b9bfb0c92bac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.310879] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.310879] env[61972]: value = "task-1389114" [ 742.310879] env[61972]: _type = "Task" [ 742.310879] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.318987] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389114, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.410489] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.022s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.411430] env[61972]: ERROR nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. 
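Note: when chasing a PortBindingFailed like the one above, the port's binding fields in Neutron usually show which host and mechanism driver refused the binding. A sketch using openstacksdk, assuming admin credentials are available via clouds.yaml or OS_* environment variables (attribute names as exposed by openstacksdk's Port resource):

    import openstack

    # Port ID taken from the PortBindingFailed message in this log.
    conn = openstack.connect()
    port = conn.network.get_port('0739af50-d18a-4bb5-899a-198661218598')
    print(port.binding_vif_type)   # 'binding_failed' when no ML2 driver bound it
    print(port.binding_host_id)    # host the binding was attempted on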
[ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Traceback (most recent call last): [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self.driver.spawn(context, instance, image_meta, [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] vm_ref = self.build_virtual_machine(instance, [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] vif_infos = vmwarevif.get_vif_info(self._session, [ 742.411430] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] for vif in network_info: [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] return self._sync_wrapper(fn, *args, **kwargs) [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self.wait() [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self[:] = self._gt.wait() [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] return self._exit_event.wait() [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] result = hub.switch() [ 742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
742.411894] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] return self.greenlet.switch() [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] result = function(*args, **kwargs) [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] return func(*args, **kwargs) [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] raise e [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] nwinfo = self.network_api.allocate_for_instance( [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] created_port_ids = self._update_ports_for_instance( [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] with excutils.save_and_reraise_exception(): [ 742.412351] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] self.force_reraise() [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] raise self.value [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] updated_port = self._update_port( [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] _ensure_no_port_binding_failure(port) [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] raise exception.PortBindingFailed(port_id=port['id']) [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] nova.exception.PortBindingFailed: Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. [ 742.412698] env[61972]: ERROR nova.compute.manager [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] [ 742.413071] env[61972]: DEBUG nova.compute.utils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 742.416017] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.374s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.416017] env[61972]: INFO nova.compute.claims [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.418695] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Build of instance a978943b-afd3-44f4-b6c1-5a72dda8ca35 was re-scheduled: Binding failed for port 0739af50-d18a-4bb5-899a-198661218598, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 742.419192] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 742.419461] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.419638] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.419839] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.453101] env[61972]: DEBUG nova.compute.manager [req-9adbe884-6609-4191-9cf9-905bdd18843b req-6ae2417a-8953-4730-84ce-b774054ed880 service nova] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Received event network-vif-deleted-99bf816e-e60d-4917-87ee-03be0502915b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 742.633230] env[61972]: INFO nova.compute.manager [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] [instance: e0d51c99-1916-4d66-a141-dfa5d4357174] Took 1.02 seconds to deallocate network for instance. [ 742.747235] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Releasing lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.747649] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 742.747840] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.749029] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12ccf06f-4e46-41e1-848c-fd9dec2ac22d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.759511] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedbf3eb-d8a8-4bf8-8a55-454ddc2ad003 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.781822] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7801858d-bc2a-466e-a6f2-a8c6b6ff4705 could not be found. [ 742.782076] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.782289] env[61972]: INFO nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Took 0.03 seconds to destroy the instance on the hypervisor. [ 742.782825] env[61972]: DEBUG oslo.service.loopingcall [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.782825] env[61972]: DEBUG nova.compute.manager [-] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 742.782825] env[61972]: DEBUG nova.network.neutron [-] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.798713] env[61972]: DEBUG nova.network.neutron [-] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.821066] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389114, 'name': CreateVM_Task, 'duration_secs': 0.272722} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.821309] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 742.821713] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.822250] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.822567] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 742.822809] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-292c5dfc-facb-4498-955d-898417a13d74 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.827438] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 742.827438] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ce8b90-52ef-384d-001b-6f21535c4b8a" [ 742.827438] env[61972]: _type = "Task" [ 742.827438] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.835259] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ce8b90-52ef-384d-001b-6f21535c4b8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.941491] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.017691] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.302013] env[61972]: DEBUG nova.network.neutron [-] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.338495] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ce8b90-52ef-384d-001b-6f21535c4b8a, 'name': SearchDatastore_Task, 'duration_secs': 0.011145} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.338975] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.339049] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.339611] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.339611] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.339611] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.339796] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcfd122e-54d8-4ed6-af36-0164928712d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.347573] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d 
tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.347747] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.349928] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f26cde8-3c46-4393-94ca-3f74533e6f96 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.353841] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 743.353841] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7aaaf-3c77-bd58-09ba-193a8b075eeb" [ 743.353841] env[61972]: _type = "Task" [ 743.353841] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.361409] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7aaaf-3c77-bd58-09ba-193a8b075eeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.520684] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-a978943b-afd3-44f4-b6c1-5a72dda8ca35" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.521115] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 743.521201] env[61972]: DEBUG nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 743.521383] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.542520] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.668686] env[61972]: INFO nova.scheduler.client.report [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Deleted allocations for instance e0d51c99-1916-4d66-a141-dfa5d4357174 [ 743.797439] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b840aabf-2039-41d1-8b58-1c89a901a1f5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.805544] env[61972]: INFO nova.compute.manager [-] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Took 1.02 seconds to deallocate network for instance. 
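Note: the inventory dict that report.py logs above is interpreted by Placement as capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A small illustration using the figures from this log:

    # Figures copied from the "Inventory has not changed for provider" entries.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 175},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable, at most {inv['max_unit']} per instance")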
[ 743.807880] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b1b5c3-3742-4708-9f76-bca60a2bae44 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.811042] env[61972]: DEBUG nova.compute.claims [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 743.811232] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.839059] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb955b7a-c143-4dec-851e-67256895428a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.845347] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd042fad-58c9-4a80-87da-77dbdcc1997c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.859317] env[61972]: DEBUG nova.compute.provider_tree [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.869045] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7aaaf-3c77-bd58-09ba-193a8b075eeb, 'name': SearchDatastore_Task, 'duration_secs': 0.008068} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.870134] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e981db00-7ba0-47f7-abda-7e290e4f7232 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.875973] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 743.875973] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]523ccf86-55b2-b1b2-a8c5-84a12e9b294e" [ 743.875973] env[61972]: _type = "Task" [ 743.875973] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.883930] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523ccf86-55b2-b1b2-a8c5-84a12e9b294e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.045524] env[61972]: DEBUG nova.network.neutron [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.178911] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7404f1c0-2276-4183-ad48-0bbf027a58cb tempest-ServerMetadataTestJSON-2117621628 tempest-ServerMetadataTestJSON-2117621628-project-member] Lock "e0d51c99-1916-4d66-a141-dfa5d4357174" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.965s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.364461] env[61972]: DEBUG nova.scheduler.client.report [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 744.387060] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523ccf86-55b2-b1b2-a8c5-84a12e9b294e, 'name': SearchDatastore_Task, 'duration_secs': 0.009995} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.387204] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.387434] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.388316] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1148b232-bdb1-4ee9-9af1-7f725a08970f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.395676] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 744.395676] env[61972]: value = "task-1389115" [ 744.395676] env[61972]: _type = "Task" [ 744.395676] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.404675] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389115, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.548802] env[61972]: INFO nova.compute.manager [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: a978943b-afd3-44f4-b6c1-5a72dda8ca35] Took 1.03 seconds to deallocate network for instance. [ 744.681561] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 744.870597] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.870597] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 744.873688] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.680s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.909279] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389115, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456087} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.909279] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 744.909674] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.910754] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-506c0795-acaf-4005-91d1-d3db4c512459 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.917351] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 744.917351] env[61972]: value = "task-1389116" [ 744.917351] env[61972]: _type = "Task" [ 744.917351] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.925074] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389116, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.203888] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.381662] env[61972]: DEBUG nova.compute.utils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.382871] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 745.383377] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.429139] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389116, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068179} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.429139] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.429624] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228c04dd-378a-46ce-8307-3f514238d4ba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.451438] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.453812] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d22fccc-c4e2-4a0f-87db-1379a7a0e3a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.474730] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 745.474730] env[61972]: value = "task-1389117" [ 745.474730] env[61972]: _type = "Task" [ 745.474730] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.484130] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389117, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.490658] env[61972]: DEBUG nova.policy [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd540dc382bcd4f3590cf3fd4e24114cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5956bee656b472c8abfaed83a60fe4d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 745.576129] env[61972]: INFO nova.scheduler.client.report [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted allocations for instance a978943b-afd3-44f4-b6c1-5a72dda8ca35 [ 745.820555] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9461019a-f525-4de5-8ef6-c749dfb27916 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.828052] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8cb3d6-641f-4f6e-bc9b-e0ea49283ddf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.858867] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09525f70-8d89-4f0e-83d3-b5abc5e9b2f3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.866232] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8804a5-f204-4a41-9e06-58f9f15f24e5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.879595] env[61972]: DEBUG nova.compute.provider_tree [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.886615] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 745.984351] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389117, 'name': ReconfigVM_Task, 'duration_secs': 0.264184} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.984497] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29/9fd9fc35-7105-4941-8e05-cf4e45bb5d29.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.985130] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f63956c0-eb93-4eab-b747-23be1e2290b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.991721] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 745.991721] env[61972]: value = "task-1389118" [ 745.991721] env[61972]: _type = "Task" [ 745.991721] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.999446] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389118, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.087699] env[61972]: DEBUG oslo_concurrency.lockutils [None req-306b40ee-2c55-472b-8baa-1178d2df8604 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "a978943b-afd3-44f4-b6c1-5a72dda8ca35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 145.899s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.120774] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Successfully created port: c14c4024-5e3d-49f9-a5c5-db5803e5b05f {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 746.382546] env[61972]: DEBUG nova.scheduler.client.report [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 746.501955] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389118, 'name': Rename_Task, 
'duration_secs': 0.135471} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.502454] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.502824] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-446c96be-4e96-44da-a193-e110d43ed4bb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.514176] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Waiting for the task: (returnval){ [ 746.514176] env[61972]: value = "task-1389119" [ 746.514176] env[61972]: _type = "Task" [ 746.514176] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.523511] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.592365] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 746.890131] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.016s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.890317] env[61972]: ERROR nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. 
[ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Traceback (most recent call last): [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self.driver.spawn(context, instance, image_meta, [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] vm_ref = self.build_virtual_machine(instance, [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 746.890317] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] for vif in network_info: [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] return self._sync_wrapper(fn, *args, **kwargs) [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self.wait() [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self[:] = self._gt.wait() [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] return self._exit_event.wait() [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] current.throw(*self._exc) [ 746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
746.890745] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] result = function(*args, **kwargs) [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] return func(*args, **kwargs) [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] raise e [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] nwinfo = self.network_api.allocate_for_instance( [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] created_port_ids = self._update_ports_for_instance( [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] with excutils.save_and_reraise_exception(): [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] self.force_reraise() [ 746.891172] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] raise self.value [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] updated_port = self._update_port( [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] _ensure_no_port_binding_failure(port) [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] raise exception.PortBindingFailed(port_id=port['id']) [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] nova.exception.PortBindingFailed: Binding failed for 
port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. [ 746.891596] env[61972]: ERROR nova.compute.manager [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] [ 746.891596] env[61972]: DEBUG nova.compute.utils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Binding failed for port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 746.892769] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.671s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.897508] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Build of instance e0735ee2-0a9d-4291-8465-b644816bf8e3 was re-scheduled: Binding failed for port 6992765b-0676-40cb-85a8-bd255585b49e, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 746.897508] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 746.897508] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Acquiring lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.897508] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Acquired lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.897806] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.902147] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 746.947185] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 746.948030] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 746.948030] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.948030] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 746.948030] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.948030] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 746.948309] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 746.948309] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 746.948427] env[61972]: DEBUG 
nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 746.948585] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 746.948780] env[61972]: DEBUG nova.virt.hardware [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.952035] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bb4830-e99a-4f38-9ab9-21d30d247439 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.959149] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f6eed0-d682-4d3a-9431-869e5f96ddc8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.021339] env[61972]: DEBUG oslo_vmware.api [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Task: {'id': task-1389119, 'name': PowerOnVM_Task, 'duration_secs': 0.402886} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.021607] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.021801] env[61972]: DEBUG nova.compute.manager [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 747.022919] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9921014-3b36-4355-bd89-eb5a13127c05 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.116032] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.346170] env[61972]: DEBUG nova.compute.manager [req-a9d6f0df-6f4b-4b9c-bdbc-51823e9283e5 req-5d34466b-cf17-4248-ae3b-30f4484e677f service nova] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Received event network-changed-c14c4024-5e3d-49f9-a5c5-db5803e5b05f {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 747.346835] env[61972]: DEBUG nova.compute.manager [req-a9d6f0df-6f4b-4b9c-bdbc-51823e9283e5 req-5d34466b-cf17-4248-ae3b-30f4484e677f service nova] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Refreshing instance network info cache due to event network-changed-c14c4024-5e3d-49f9-a5c5-db5803e5b05f. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 747.346835] env[61972]: DEBUG oslo_concurrency.lockutils [req-a9d6f0df-6f4b-4b9c-bdbc-51823e9283e5 req-5d34466b-cf17-4248-ae3b-30f4484e677f service nova] Acquiring lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.346975] env[61972]: DEBUG oslo_concurrency.lockutils [req-a9d6f0df-6f4b-4b9c-bdbc-51823e9283e5 req-5d34466b-cf17-4248-ae3b-30f4484e677f service nova] Acquired lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.347200] env[61972]: DEBUG nova.network.neutron [req-a9d6f0df-6f4b-4b9c-bdbc-51823e9283e5 req-5d34466b-cf17-4248-ae3b-30f4484e677f service nova] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Refreshing network info cache for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.354627] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "3d424523-b45d-4174-ac7a-08fd653e314f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.354627] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.443732] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.542374] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.620264] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.731451] env[61972]: ERROR nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. [ 747.731451] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 747.731451] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 747.731451] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 747.731451] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.731451] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 747.731451] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.731451] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 747.731451] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.731451] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 747.731451] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.731451] env[61972]: ERROR nova.compute.manager raise self.value [ 747.731451] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.731451] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 747.731451] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.731451] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 747.732235] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.732235] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 747.732235] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. 
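(Editor's note, hedged sketch:) the traceback above ends in _ensure_no_port_binding_failure, which inspects the port Neutron returned and raises when its binding:vif_type came back as 'binding_failed'. A simplified, self-contained version of that check is shown below; the exception class is a stand-in for nova.exception.PortBindingFailed, not the real one.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")


def ensure_no_port_binding_failure(port):
    # Neutron signals a failed binding via binding:vif_type on the port;
    # raising here is what aborts the build and triggers the re-schedule
    # seen in this log.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example with a port dict shaped like Neutron's response (no exception):
ensure_no_port_binding_failure({'id': 'c14c4024-5e3d-49f9-a5c5-db5803e5b05f',
                                'binding:vif_type': 'ovs'})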
[ 747.732235] env[61972]: ERROR nova.compute.manager [ 747.732235] env[61972]: Traceback (most recent call last): [ 747.732235] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 747.732235] env[61972]: listener.cb(fileno) [ 747.732235] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 747.732235] env[61972]: result = function(*args, **kwargs) [ 747.732235] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 747.732235] env[61972]: return func(*args, **kwargs) [ 747.732235] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 747.732235] env[61972]: raise e [ 747.732235] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 747.732235] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 747.732235] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.732235] env[61972]: created_port_ids = self._update_ports_for_instance( [ 747.732235] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.732235] env[61972]: with excutils.save_and_reraise_exception(): [ 747.732235] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.732235] env[61972]: self.force_reraise() [ 747.732235] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.732235] env[61972]: raise self.value [ 747.732235] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.732235] env[61972]: updated_port = self._update_port( [ 747.732235] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.732235] env[61972]: _ensure_no_port_binding_failure(port) [ 747.732235] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.732235] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 747.733902] env[61972]: nova.exception.PortBindingFailed: Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. [ 747.733902] env[61972]: Removing descriptor: 19 [ 747.733902] env[61972]: ERROR nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. 
[ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] Traceback (most recent call last): [ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] yield resources [ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self.driver.spawn(context, instance, image_meta, [ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self._vmops.spawn(context, instance, image_meta, injected_files, [ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 747.733902] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] vm_ref = self.build_virtual_machine(instance, [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] vif_infos = vmwarevif.get_vif_info(self._session, [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] for vif in network_info: [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] return self._sync_wrapper(fn, *args, **kwargs) [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self.wait() [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self[:] = self._gt.wait() [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] return self._exit_event.wait() [ 747.734793] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 747.735625] env[61972]: ERROR 
nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] result = hub.switch() [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] return self.greenlet.switch() [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] result = function(*args, **kwargs) [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] return func(*args, **kwargs) [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] raise e [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] nwinfo = self.network_api.allocate_for_instance( [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.735625] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] created_port_ids = self._update_ports_for_instance( [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] with excutils.save_and_reraise_exception(): [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self.force_reraise() [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] raise self.value [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] updated_port = self._update_port( [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.736529] 
env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] _ensure_no_port_binding_failure(port) [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.736529] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] raise exception.PortBindingFailed(port_id=port['id']) [ 747.737418] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] nova.exception.PortBindingFailed: Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. [ 747.737418] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] [ 747.737418] env[61972]: INFO nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Terminating instance [ 747.830641] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166ffd41-8073-4043-814d-4dcfc7db192d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.838331] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663bc68a-11f1-4b56-851d-c787123590c8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.870238] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a730e418-75d2-4ba8-976d-77503a049254 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.878651] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd97a07-1bea-4453-8e5e-753e54922738 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.892593] env[61972]: DEBUG nova.compute.provider_tree [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.894556] env[61972]: DEBUG nova.network.neutron [req-a9d6f0df-6f4b-4b9c-bdbc-51823e9283e5 req-5d34466b-cf17-4248-ae3b-30f4484e677f service nova] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.008012] env[61972]: DEBUG nova.network.neutron [req-a9d6f0df-6f4b-4b9c-bdbc-51823e9283e5 req-5d34466b-cf17-4248-ae3b-30f4484e677f service nova] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.123364] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Releasing lock "refresh_cache-e0735ee2-0a9d-4291-8465-b644816bf8e3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.123692] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 748.124576] env[61972]: DEBUG nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 748.124576] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 748.131330] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.131595] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.131803] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.131979] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29-events" acquired 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.132159] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.135531] env[61972]: INFO nova.compute.manager [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Terminating instance [ 748.138950] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.235724] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Acquiring lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.398247] env[61972]: DEBUG nova.scheduler.client.report [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 748.510058] env[61972]: DEBUG oslo_concurrency.lockutils [req-a9d6f0df-6f4b-4b9c-bdbc-51823e9283e5 req-5d34466b-cf17-4248-ae3b-30f4484e677f service nova] Releasing lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.510531] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Acquired lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.510716] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.639382] 
env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "refresh_cache-9fd9fc35-7105-4941-8e05-cf4e45bb5d29" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.639581] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquired lock "refresh_cache-9fd9fc35-7105-4941-8e05-cf4e45bb5d29" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.639782] env[61972]: DEBUG nova.network.neutron [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.641073] env[61972]: DEBUG nova.network.neutron [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.902672] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.010s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.903353] env[61972]: ERROR nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. 
[ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Traceback (most recent call last): [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self.driver.spawn(context, instance, image_meta, [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] vm_ref = self.build_virtual_machine(instance, [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.903353] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] for vif in network_info: [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] return self._sync_wrapper(fn, *args, **kwargs) [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self.wait() [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self[:] = self._gt.wait() [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] return self._exit_event.wait() [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] result = hub.switch() [ 748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
748.903784] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] return self.greenlet.switch() [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] result = function(*args, **kwargs) [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] return func(*args, **kwargs) [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] raise e [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] nwinfo = self.network_api.allocate_for_instance( [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] created_port_ids = self._update_ports_for_instance( [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] with excutils.save_and_reraise_exception(): [ 748.904208] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] self.force_reraise() [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] raise self.value [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] updated_port = self._update_port( [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] _ensure_no_port_binding_failure(port) [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] raise exception.PortBindingFailed(port_id=port['id']) [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] nova.exception.PortBindingFailed: Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. [ 748.904600] env[61972]: ERROR nova.compute.manager [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] [ 748.904955] env[61972]: DEBUG nova.compute.utils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 748.905826] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.742s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.906008] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.906169] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 748.906454] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.372s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.909912] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Build of instance 56488ac6-c94b-4b40-9cad-b0c36a3d293e was re-scheduled: Binding failed for port 396f16dd-e25c-4099-a353-68109855f1fe, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 748.910430] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 748.910652] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.910800] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquired lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.910956] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.912975] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33830a1f-cffb-4752-9a5b-750c934ae422 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.925062] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c793bd-2475-43b5-9a46-c88dd7673aa5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.941441] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354bd319-87f8-415c-a8a6-6f2338ff4764 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.949572] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7163942a-2a38-46eb-b6ea-1b15aa658941 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.977532] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181409MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 748.977714] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.130824] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 
tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.147177] env[61972]: INFO nova.compute.manager [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] [instance: e0735ee2-0a9d-4291-8465-b644816bf8e3] Took 1.02 seconds to deallocate network for instance. [ 749.177870] env[61972]: DEBUG nova.network.neutron [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.289684] env[61972]: DEBUG nova.network.neutron [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.313424] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.445749] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.451969] env[61972]: DEBUG nova.compute.manager [req-7880104f-1473-4b41-b483-05a8274dbd15 req-f3040269-ed06-4a35-a769-d0f5b673e246 service nova] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Received event network-vif-deleted-c14c4024-5e3d-49f9-a5c5-db5803e5b05f {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 749.523444] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.758397] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4148c78d-5164-42a7-8f82-1dc0d990f012 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.766672] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cafeb5a-7ade-4eaa-ab2d-df033cb96150 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.796715] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Releasing lock "refresh_cache-9fd9fc35-7105-4941-8e05-cf4e45bb5d29" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.796890] env[61972]: DEBUG nova.compute.manager [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 749.797094] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.798086] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923cd09b-39da-46c8-9add-d2f57188ced1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.801273] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20523997-14f8-4e58-b805-8580af34adb6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.811033] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20312d5-5f0e-424d-b56a-3a2cdc8b9147 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.814763] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.814993] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-513f550b-343a-4a93-8189-919c8c703936 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.826266] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Releasing lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.826665] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 749.826843] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.827641] env[61972]: DEBUG nova.compute.provider_tree [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.830466] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3de6f042-6c7b-4c1d-b653-a04199ea94af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.831874] env[61972]: DEBUG oslo_vmware.api [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 749.831874] env[61972]: value = "task-1389120" [ 749.831874] env[61972]: _type = "Task" [ 749.831874] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.840230] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829f1317-8c6d-46c9-b934-7ccb37f6660b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.853480] env[61972]: DEBUG oslo_vmware.api [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389120, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.866928] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dc5ef08a-8692-4274-84df-7c2923099249 could not be found. [ 749.866928] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 749.867196] env[61972]: INFO nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Took 0.04 seconds to destroy the instance on the hypervisor. [ 749.867397] env[61972]: DEBUG oslo.service.loopingcall [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.867607] env[61972]: DEBUG nova.compute.manager [-] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 749.867701] env[61972]: DEBUG nova.network.neutron [-] [instance: dc5ef08a-8692-4274-84df-7c2923099249] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 749.893611] env[61972]: DEBUG nova.network.neutron [-] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.026636] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Releasing lock "refresh_cache-56488ac6-c94b-4b40-9cad-b0c36a3d293e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.026972] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 750.027206] env[61972]: DEBUG nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 750.027382] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.045054] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.175623] env[61972]: INFO nova.scheduler.client.report [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Deleted allocations for instance e0735ee2-0a9d-4291-8465-b644816bf8e3 [ 750.334545] env[61972]: DEBUG nova.scheduler.client.report [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 750.357325] env[61972]: DEBUG oslo_vmware.api [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389120, 'name': PowerOffVM_Task, 'duration_secs': 0.314938} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.357325] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 750.357469] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 750.357705] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-882ab62e-4b4b-4556-9c26-c2f04fee74a9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.383798] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.384126] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.384271] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleting the datastore file [datastore1] 9fd9fc35-7105-4941-8e05-cf4e45bb5d29 {{(pid=61972) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.384536] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f0d078f-0af0-4252-9312-ea493015f2f0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.390165] env[61972]: DEBUG oslo_vmware.api [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for the task: (returnval){ [ 750.390165] env[61972]: value = "task-1389122" [ 750.390165] env[61972]: _type = "Task" [ 750.390165] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.397913] env[61972]: DEBUG nova.network.neutron [-] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.399344] env[61972]: DEBUG oslo_vmware.api [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389122, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.548438] env[61972]: DEBUG nova.network.neutron [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.567117] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "e2b6dd4e-b639-4553-a45f-87c155506ea3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.567341] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "e2b6dd4e-b639-4553-a45f-87c155506ea3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.684737] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5547deb-63d3-43aa-a632-1c52518dccc5 tempest-ServerActionsV293TestJSON-190518957 tempest-ServerActionsV293TestJSON-190518957-project-member] Lock "e0735ee2-0a9d-4291-8465-b644816bf8e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.330s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.785851] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "94bd64b9-3d20-4631-baed-4500f9beb9c2" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.786087] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "94bd64b9-3d20-4631-baed-4500f9beb9c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.847522] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.941s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.848608] env[61972]: ERROR nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Traceback (most recent call last): [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self.driver.spawn(context, instance, image_meta, [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] vm_ref = self.build_virtual_machine(instance, [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] vif_infos = vmwarevif.get_vif_info(self._session, [ 750.848608] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] for vif in network_info: [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 750.849074] env[61972]: 
ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] return self._sync_wrapper(fn, *args, **kwargs) [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self.wait() [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self[:] = self._gt.wait() [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] return self._exit_event.wait() [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] result = hub.switch() [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 750.849074] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] return self.greenlet.switch() [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] result = function(*args, **kwargs) [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] return func(*args, **kwargs) [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] raise e [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] nwinfo = self.network_api.allocate_for_instance( [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] created_port_ids = self._update_ports_for_instance( [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 750.849487] env[61972]: ERROR nova.compute.manager 
[instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] with excutils.save_and_reraise_exception(): [ 750.849487] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] self.force_reraise() [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] raise self.value [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] updated_port = self._update_port( [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] _ensure_no_port_binding_failure(port) [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] raise exception.PortBindingFailed(port_id=port['id']) [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] nova.exception.PortBindingFailed: Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. [ 750.849930] env[61972]: ERROR nova.compute.manager [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] [ 750.850369] env[61972]: DEBUG nova.compute.utils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. 
{{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 750.850369] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.453s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.851889] env[61972]: INFO nova.compute.claims [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.854304] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Build of instance 2725d6ed-89d9-479f-b6ee-d16523e0abab was re-scheduled: Binding failed for port 6bc96103-47f0-437c-b043-990f91968c0e, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 750.854679] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 750.854891] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquiring lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.855745] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Acquired lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.855745] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.899621] env[61972]: DEBUG oslo_vmware.api [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Task: {'id': task-1389122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09599} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.900057] env[61972]: INFO nova.compute.manager [-] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Took 1.03 seconds to deallocate network for instance. 
[ 750.900385] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.900666] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.900921] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.901306] env[61972]: INFO nova.compute.manager [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Took 1.10 seconds to destroy the instance on the hypervisor. [ 750.901436] env[61972]: DEBUG oslo.service.loopingcall [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.903029] env[61972]: DEBUG nova.compute.manager [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 750.903194] env[61972]: DEBUG nova.network.neutron [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.905648] env[61972]: DEBUG nova.compute.claims [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 750.905996] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.918272] env[61972]: DEBUG nova.network.neutron [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.050158] env[61972]: INFO nova.compute.manager [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 56488ac6-c94b-4b40-9cad-b0c36a3d293e] Took 1.02 seconds to deallocate network for instance. 
[ 751.188934] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 751.381679] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.422335] env[61972]: DEBUG nova.network.neutron [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.494866] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.713494] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.928217] env[61972]: INFO nova.compute.manager [-] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Took 1.02 seconds to deallocate network for instance. [ 751.996971] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Releasing lock "refresh_cache-2725d6ed-89d9-479f-b6ee-d16523e0abab" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.997229] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 751.997403] env[61972]: DEBUG nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 751.997552] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.032223] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.088608] env[61972]: INFO nova.scheduler.client.report [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Deleted allocations for instance 56488ac6-c94b-4b40-9cad-b0c36a3d293e [ 752.287502] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afe28ef-3e0c-4f61-b75a-bd4e53506652 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.296152] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee7c89b-7eb5-4abb-b838-41a4c869c462 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.333570] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216b0a14-ee7c-4d46-abef-f3d390d27756 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.341707] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2602f1be-7b1c-430d-83e0-fed0b8f6b529 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.358804] env[61972]: DEBUG nova.compute.provider_tree [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.443204] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.541590] env[61972]: DEBUG nova.network.neutron [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 
tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.606601] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5a580a6b-85f6-4f47-bf2c-6bf3b4cbc309 tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "56488ac6-c94b-4b40-9cad-b0c36a3d293e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.191s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.862651] env[61972]: DEBUG nova.scheduler.client.report [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 753.048022] env[61972]: INFO nova.compute.manager [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] [instance: 2725d6ed-89d9-479f-b6ee-d16523e0abab] Took 1.05 seconds to deallocate network for instance. [ 753.109063] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 753.367474] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.367890] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 753.370619] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.046s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.370752] env[61972]: DEBUG nova.objects.instance [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61972) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 753.630372] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.875021] env[61972]: DEBUG nova.compute.utils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 753.878893] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 753.879741] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 753.928747] env[61972]: DEBUG nova.policy [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2da45520a5f9400c926ef0958bc1c155', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee8f0ad993d74cad9e213e5f7f15558b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 754.079509] env[61972]: INFO nova.scheduler.client.report [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Deleted allocations for instance 2725d6ed-89d9-479f-b6ee-d16523e0abab [ 754.302495] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Successfully created port: e89e6e71-0bc6-4f0f-94f3-fad27da19bda {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.382564] env[61972]: DEBUG oslo_concurrency.lockutils [None req-19886552-7312-4468-b205-9b41c7d01ad3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.384033] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 754.387352] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.765s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.592378] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e259e3ef-90d4-4c62-9bcc-451d67c359bd tempest-ServerRescueNegativeTestJSON-2126695368 tempest-ServerRescueNegativeTestJSON-2126695368-project-member] Lock "2725d6ed-89d9-479f-b6ee-d16523e0abab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.100s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.892252] env[61972]: INFO nova.virt.block_device [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Booting with volume 46487235-43fe-4154-9820-0554d03f1554 at /dev/sda [ 754.947460] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4422156d-97be-409d-9e98-c42f5763f7a2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.956337] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5938789-adcf-4bb0-ac0f-8ac45d65bb60 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.983088] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-198f8a3f-8738-4f50-91da-a87ea32989ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.990559] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b92c54-7b57-4834-879b-8c76546e2b66 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.016713] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ada4617-f9b9-4a01-b042-a51d49e2f41a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.022937] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b108d1c-4f14-4889-adf8-784cab4450e7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.035647] env[61972]: DEBUG nova.virt.block_device [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Updating existing volume attachment record: 43e35942-fd6b-447d-8a23-e1f0e0a3fe97 {{(pid=61972) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 755.097125] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] 
[instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 755.281800] env[61972]: DEBUG nova.compute.manager [req-cf743fa1-632f-4b41-b95b-c8b923b92b7c req-21aa47d1-095a-4925-b7a6-f3a001960653 service nova] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Received event network-changed-e89e6e71-0bc6-4f0f-94f3-fad27da19bda {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 755.281979] env[61972]: DEBUG nova.compute.manager [req-cf743fa1-632f-4b41-b95b-c8b923b92b7c req-21aa47d1-095a-4925-b7a6-f3a001960653 service nova] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Refreshing instance network info cache due to event network-changed-e89e6e71-0bc6-4f0f-94f3-fad27da19bda. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 755.282334] env[61972]: DEBUG oslo_concurrency.lockutils [req-cf743fa1-632f-4b41-b95b-c8b923b92b7c req-21aa47d1-095a-4925-b7a6-f3a001960653 service nova] Acquiring lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.282391] env[61972]: DEBUG oslo_concurrency.lockutils [req-cf743fa1-632f-4b41-b95b-c8b923b92b7c req-21aa47d1-095a-4925-b7a6-f3a001960653 service nova] Acquired lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.283038] env[61972]: DEBUG nova.network.neutron [req-cf743fa1-632f-4b41-b95b-c8b923b92b7c req-21aa47d1-095a-4925-b7a6-f3a001960653 service nova] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Refreshing network info cache for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.326596] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06023508-c9bf-426e-a1a6-2c3a5c8c4fe1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.335589] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567ab617-b26b-42f8-a112-f87912e0ea5a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.374063] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7f2f82-6c17-4f96-9b3d-c3e2b682f1a2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.381727] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8cd9a3-54d6-479c-b6a5-11f73757e056 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.399577] env[61972]: DEBUG nova.compute.provider_tree [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.549825] env[61972]: ERROR nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 
tempest-ServersTestBootFromVolume-2044502181-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. [ 755.549825] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 755.549825] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 755.549825] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 755.549825] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 755.549825] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 755.549825] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 755.549825] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 755.549825] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 755.549825] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 755.549825] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 755.549825] env[61972]: ERROR nova.compute.manager raise self.value [ 755.549825] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 755.549825] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 755.549825] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 755.549825] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 755.550348] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 755.550348] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 755.550348] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. 
[ 755.550348] env[61972]: ERROR nova.compute.manager [ 755.550348] env[61972]: Traceback (most recent call last): [ 755.550348] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 755.550348] env[61972]: listener.cb(fileno) [ 755.550348] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 755.550348] env[61972]: result = function(*args, **kwargs) [ 755.550348] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 755.550348] env[61972]: return func(*args, **kwargs) [ 755.550348] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 755.550348] env[61972]: raise e [ 755.550348] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 755.550348] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 755.550348] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 755.550348] env[61972]: created_port_ids = self._update_ports_for_instance( [ 755.550348] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 755.550348] env[61972]: with excutils.save_and_reraise_exception(): [ 755.550348] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 755.550348] env[61972]: self.force_reraise() [ 755.550348] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 755.550348] env[61972]: raise self.value [ 755.550348] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 755.550348] env[61972]: updated_port = self._update_port( [ 755.550348] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 755.550348] env[61972]: _ensure_no_port_binding_failure(port) [ 755.550348] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 755.550348] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 755.551215] env[61972]: nova.exception.PortBindingFailed: Binding failed for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. [ 755.551215] env[61972]: Removing descriptor: 19 [ 755.626125] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.803786] env[61972]: DEBUG nova.network.neutron [req-cf743fa1-632f-4b41-b95b-c8b923b92b7c req-21aa47d1-095a-4925-b7a6-f3a001960653 service nova] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.903404] env[61972]: DEBUG nova.scheduler.client.report [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 755.995345] env[61972]: DEBUG nova.network.neutron [req-cf743fa1-632f-4b41-b95b-c8b923b92b7c req-21aa47d1-095a-4925-b7a6-f3a001960653 service nova] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.408173] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.021s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.408800] env[61972]: ERROR nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. 
[ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Traceback (most recent call last): [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self.driver.spawn(context, instance, image_meta, [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] vm_ref = self.build_virtual_machine(instance, [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.408800] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] for vif in network_info: [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] return self._sync_wrapper(fn, *args, **kwargs) [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self.wait() [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self[:] = self._gt.wait() [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] return self._exit_event.wait() [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] result = hub.switch() [ 756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
756.409180] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] return self.greenlet.switch() [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] result = function(*args, **kwargs) [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] return func(*args, **kwargs) [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] raise e [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] nwinfo = self.network_api.allocate_for_instance( [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] created_port_ids = self._update_ports_for_instance( [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] with excutils.save_and_reraise_exception(): [ 756.409548] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] self.force_reraise() [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] raise self.value [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] updated_port = self._update_port( [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] _ensure_no_port_binding_failure(port) [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] raise exception.PortBindingFailed(port_id=port['id']) [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] nova.exception.PortBindingFailed: Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. [ 756.409910] env[61972]: ERROR nova.compute.manager [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] [ 756.410380] env[61972]: DEBUG nova.compute.utils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 756.411492] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Build of instance 2b0039dd-1219-465d-beb8-0262e0e40029 was re-scheduled: Binding failed for port 1a703979-d671-41f7-845a-aa6e43246c2c, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 756.411924] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 756.412301] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Acquiring lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.412502] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Acquired lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.412692] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.416915] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.666s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.416915] env[61972]: INFO nova.compute.claims [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 
tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.440055] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.498479] env[61972]: DEBUG oslo_concurrency.lockutils [req-cf743fa1-632f-4b41-b95b-c8b923b92b7c req-21aa47d1-095a-4925-b7a6-f3a001960653 service nova] Releasing lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.558767] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.061164] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Releasing lock "refresh_cache-2b0039dd-1219-465d-beb8-0262e0e40029" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.061433] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 757.061610] env[61972]: DEBUG nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 757.061849] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.076054] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.149058] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 757.149260] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 757.149486] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 757.149658] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.149857] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 757.150013] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.150191] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 757.150542] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 757.150592] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 757.150734] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] 
Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 757.150893] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 757.151084] env[61972]: DEBUG nova.virt.hardware [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.152289] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37ef2b1-e4bf-4f03-8fff-4e73bd13bb79 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.160982] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fb27e6-1e36-471b-8684-711ff8cc8e39 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.175038] env[61972]: ERROR nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. 
[ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Traceback (most recent call last): [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] yield resources [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self.driver.spawn(context, instance, image_meta, [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] vm_ref = self.build_virtual_machine(instance, [ 757.175038] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] for vif in network_info: [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] return self._sync_wrapper(fn, *args, **kwargs) [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self.wait() [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self[:] = self._gt.wait() [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] return self._exit_event.wait() [ 757.175499] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 757.175499] env[61972]: ERROR 
nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] current.throw(*self._exc) [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] result = function(*args, **kwargs) [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] return func(*args, **kwargs) [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] raise e [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] nwinfo = self.network_api.allocate_for_instance( [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] created_port_ids = self._update_ports_for_instance( [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] with excutils.save_and_reraise_exception(): [ 757.175928] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self.force_reraise() [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] raise self.value [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] updated_port = self._update_port( [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] _ensure_no_port_binding_failure(port) [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] raise exception.PortBindingFailed(port_id=port['id']) [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] nova.exception.PortBindingFailed: Binding failed for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. [ 757.176400] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] [ 757.176400] env[61972]: INFO nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Terminating instance [ 757.340842] env[61972]: DEBUG nova.compute.manager [req-c397f639-72da-4d26-a39c-d9051f0ac153 req-96b7efa4-e825-465d-92c7-98266d77b4df service nova] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Received event network-vif-deleted-e89e6e71-0bc6-4f0f-94f3-fad27da19bda {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 757.582212] env[61972]: DEBUG nova.network.neutron [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.679666] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Acquiring lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.679860] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Acquired lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.680024] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.749270] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca655358-aeb5-4a38-8763-cfc8105eb2fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.757827] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8133c0b-a46a-454a-b3ba-69a94fdb9e61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.795536] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45eedb53-30f4-4b46-a818-feb6a6ea3dae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.804179] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6091a4-017a-4ad1-967f-d360502a26ec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.817015] env[61972]: DEBUG nova.compute.provider_tree [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.086926] env[61972]: INFO nova.compute.manager [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] [instance: 2b0039dd-1219-465d-beb8-0262e0e40029] Took 1.02 seconds to deallocate network for instance. [ 758.199398] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.281839] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.320642] env[61972]: DEBUG nova.scheduler.client.report [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 758.485461] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "667aff7f-57d5-4133-934d-386602a866f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.485692] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "667aff7f-57d5-4133-934d-386602a866f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.785035] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Releasing 
lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.785335] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 758.785656] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-043693d7-5953-4f8e-8844-25b3753ce7d4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.794641] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878b9f11-f0a7-4bc5-8ab8-25f41c86213d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.816060] env[61972]: WARNING nova.virt.vmwareapi.driver [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance bc10dded-e669-4fdb-9f5b-cc6abc3a37c7 could not be found. [ 758.816168] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 758.816463] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-909d4fa4-7d63-4dae-95a1-26678a83f716 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.823653] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9803026b-0d53-4ccd-a260-4128d5931315 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.833942] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.834423] env[61972]: DEBUG nova.compute.manager [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 758.838141] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.026s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.851401] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bc10dded-e669-4fdb-9f5b-cc6abc3a37c7 could not be found. [ 758.851639] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 758.851824] env[61972]: INFO nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Took 0.07 seconds to destroy the instance on the hypervisor. [ 758.852068] env[61972]: DEBUG oslo.service.loopingcall [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.852313] env[61972]: DEBUG nova.compute.manager [-] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 758.852409] env[61972]: DEBUG nova.network.neutron [-] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 758.871817] env[61972]: DEBUG nova.network.neutron [-] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.118025] env[61972]: INFO nova.scheduler.client.report [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Deleted allocations for instance 2b0039dd-1219-465d-beb8-0262e0e40029 [ 759.345902] env[61972]: DEBUG nova.compute.utils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 759.347590] env[61972]: DEBUG nova.compute.manager [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Not allocating networking since 'none' was specified. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 759.373780] env[61972]: DEBUG nova.network.neutron [-] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.621351] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d113bfc-e330-4629-a377-6f10c47b2bdb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.628074] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e377f544-5efc-466e-86de-8666f807a67b tempest-TenantUsagesTestJSON-366749740 tempest-TenantUsagesTestJSON-366749740-project-member] Lock "2b0039dd-1219-465d-beb8-0262e0e40029" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.854s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.630609] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846b00ee-b0f3-4daa-90bd-f46555a8f194 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.661599] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e66b4a9-0463-4995-bdb3-fb3d30471c7b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.669346] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f58babc-ed6c-49a0-87ee-be70557c82c6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.682862] env[61972]: DEBUG nova.compute.provider_tree [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.849869] env[61972]: DEBUG nova.compute.manager [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 759.880730] env[61972]: INFO nova.compute.manager [-] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Took 1.03 seconds to deallocate network for instance. [ 760.131018] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 760.186797] env[61972]: DEBUG nova.scheduler.client.report [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 760.439332] env[61972]: INFO nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Took 0.56 seconds to detach 1 volumes for instance. [ 760.441572] env[61972]: DEBUG nova.compute.claims [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 760.441819] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.650743] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.691211] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.854s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.691826] env[61972]: ERROR nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 
7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Traceback (most recent call last): [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self.driver.spawn(context, instance, image_meta, [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self._vmops.spawn(context, instance, image_meta, injected_files, [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] vm_ref = self.build_virtual_machine(instance, [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] vif_infos = vmwarevif.get_vif_info(self._session, [ 760.691826] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] for vif in network_info: [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] return self._sync_wrapper(fn, *args, **kwargs) [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self.wait() [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self[:] = self._gt.wait() [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] return self._exit_event.wait() [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] result = 
hub.switch() [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 760.692205] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] return self.greenlet.switch() [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] result = function(*args, **kwargs) [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] return func(*args, **kwargs) [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] raise e [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] nwinfo = self.network_api.allocate_for_instance( [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] created_port_ids = self._update_ports_for_instance( [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] with excutils.save_and_reraise_exception(): [ 760.692655] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] self.force_reraise() [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] raise self.value [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] updated_port = self._update_port( [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 
7801858d-bc2a-466e-a6f2-a8c6b6ff4705] _ensure_no_port_binding_failure(port) [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] raise exception.PortBindingFailed(port_id=port['id']) [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] nova.exception.PortBindingFailed: Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. [ 760.693035] env[61972]: ERROR nova.compute.manager [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] [ 760.693369] env[61972]: DEBUG nova.compute.utils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 760.693983] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.490s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.695504] env[61972]: INFO nova.compute.claims [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.698441] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Build of instance 7801858d-bc2a-466e-a6f2-a8c6b6ff4705 was re-scheduled: Binding failed for port 99bf816e-e60d-4917-87ee-03be0502915b, please check neutron logs for more information. 
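Both tracebacks in this stretch end at _ensure_no_port_binding_failure, which turns a Neutron port whose binding failed into a hard PortBindingFailed error so the build can be aborted and re-scheduled. A rough sketch of that check, assuming an unbindable port comes back from Neutron with binding:vif_type set to 'binding_failed'; this is a simplification for illustration, not the verbatim Nova code:

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # A port Neutron could not bind is surfaced as an error rather than used.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# The failed port from the traceback above would trip the check:
ensure_no_port_binding_failure({
    'id': '99bf816e-e60d-4917-87ee-03be0502915b',
    'binding:vif_type': 'binding_failed',
})  # raises PortBindingFailed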
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 760.698554] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 760.698703] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Acquiring lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.698838] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Acquired lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.698989] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.861683] env[61972]: DEBUG nova.compute.manager [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 760.886916] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 760.923337] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 760.923337] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.923337] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 760.923337] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.923337] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 760.923337] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 760.923825] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 760.923825] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 
tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 760.923825] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 760.923825] env[61972]: DEBUG nova.virt.hardware [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 760.923825] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d4bda9-b0e3-41b4-858e-85bf462f49bd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.923825] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1496c322-5ffb-4e09-b3ca-d5fe2619c605 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.924101] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.924101] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Creating folder: Project (8e108d51112949369630d686da703b51). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 760.924101] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3492f5bf-bf51-4ab1-9d35-771de2092c18 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.928334] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Created folder: Project (8e108d51112949369630d686da703b51) in parent group-v294799. [ 760.928541] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Creating folder: Instances. Parent ref: group-v294816. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 760.928787] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03cff655-c302-4c61-8b82-aef0ea0450db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.939029] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Created folder: Instances in parent group-v294816. 
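The nova.virt.hardware lines earlier in this stretch enumerate every (sockets, cores, threads) split of the flavor's vCPU count that stays within the 65536-per-dimension limits; for the single-vCPU m1.nano the only candidate is 1:1:1. A small illustrative enumeration of that idea (a sketch of the concept only, not Nova's actual _get_possible_cpu_topologies, which also weighs flavor and image preferences):

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) whose product equals the vCPU count."""
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield sockets, cores, threads

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- the single topology logged above
print(list(possible_topologies(4)))  # six splits for 4 vCPUs, from (1, 1, 4) to (4, 1, 1)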
[ 760.939029] env[61972]: DEBUG oslo.service.loopingcall [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.939029] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 760.939029] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6e2af07-3228-4310-9781-33a187ed77e6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.956444] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.956444] env[61972]: value = "task-1389126" [ 760.956444] env[61972]: _type = "Task" [ 760.956444] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.964464] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389126, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.223915] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.309897] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.466372] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389126, 'name': CreateVM_Task, 'duration_secs': 0.233858} completed successfully. 
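CreateVM_Task above is asynchronous on the vCenter side: the SOAP call returns a task handle, and oslo.vmware's wait_for_task (visible in the api.py paths above) polls it until it reaches a terminal state, logging the progress percentages seen here. A generic sketch of that poll loop; get_task_state is a hypothetical callable standing in for the vCenter query and is not part of oslo.vmware:

import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_state, task_ref, poll_interval=0.5, timeout=300):
    # Poll until the task succeeds, fails, or the timeout expires.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = get_task_state(task_ref)
        print(f"Task {task_ref}: {state}, progress {progress}%")
        if state == 'success':
            return
        if state == 'error':
            raise TaskFailed(error)
        time.sleep(poll_interval)
    raise TaskFailed(f"timed out waiting for {task_ref}")

# Canned responses standing in for what vCenter would report for the task.
responses = iter([('running', 0, None), ('running', 77, None), ('success', 100, None)])
wait_for_task(lambda ref: next(responses), 'task-1389126', poll_interval=0)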
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.467408] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.467408] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.467408] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.467581] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 761.467677] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4cd8ae2-c38c-43ac-9312-fa2e74319f82 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.472734] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 761.472734] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ad5355-11b5-0dd2-b21f-60556a17603f" [ 761.472734] env[61972]: _type = "Task" [ 761.472734] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.480050] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ad5355-11b5-0dd2-b21f-60556a17603f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.814349] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Releasing lock "refresh_cache-7801858d-bc2a-466e-a6f2-a8c6b6ff4705" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.815083] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 761.815083] env[61972]: DEBUG nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 761.815083] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 761.829779] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.982817] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ad5355-11b5-0dd2-b21f-60556a17603f, 'name': SearchDatastore_Task, 'duration_secs': 0.009442} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.985156] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.985398] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.985621] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.985764] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.985936] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 
tempest-ServerShowV257Test-1934604961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.986354] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24537d13-d626-4846-a259-049baaa5a9a6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.993842] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.994023] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.996654] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-424ef8d2-6f67-45ec-9243-5a67a2a499db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.001711] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 762.001711] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]527d0cca-7175-dfe9-7488-0d2136d06ba4" [ 762.001711] env[61972]: _type = "Task" [ 762.001711] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.008697] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]527d0cca-7175-dfe9-7488-0d2136d06ba4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.019516] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92a4315-e22a-4c3b-a5bd-cf28c9e99a97 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.026016] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c978ed9c-5aa7-4f29-a5da-0801c9e85b4b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.055693] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1b87fb-4906-4dfb-a2a5-e8f4826e1880 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.062466] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0d51b8-822b-4b69-bf14-4a0e96b9bbb9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.074839] env[61972]: DEBUG nova.compute.provider_tree [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.332012] env[61972]: DEBUG nova.network.neutron [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.514074] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]527d0cca-7175-dfe9-7488-0d2136d06ba4, 'name': SearchDatastore_Task, 'duration_secs': 0.007843} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.515330] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0928dddd-0371-45e3-b2e6-4d4a8431e179 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.521136] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 762.521136] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]525d10b7-1fcd-8739-23e2-7098dd0808dc" [ 762.521136] env[61972]: _type = "Task" [ 762.521136] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.529392] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525d10b7-1fcd-8739-23e2-7098dd0808dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.577692] env[61972]: DEBUG nova.scheduler.client.report [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 762.837682] env[61972]: INFO nova.compute.manager [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] [instance: 7801858d-bc2a-466e-a6f2-a8c6b6ff4705] Took 1.02 seconds to deallocate network for instance. [ 763.031938] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525d10b7-1fcd-8739-23e2-7098dd0808dc, 'name': SearchDatastore_Task, 'duration_secs': 0.022836} completed successfully. 
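The inventory dict the scheduler report client keeps re-checking above translates into schedulable capacity as (total - reserved) * allocation_ratio, with max_unit capping what any single instance may request. Worked through for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 using the numbers from the log record:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable, at most {inv['max_unit']} per instance")
# VCPU: 192 schedulable, at most 16 per instance
# MEMORY_MB: 196078 schedulable, at most 65530 per instance
# DISK_GB: 400 schedulable, at most 175 per instance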
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.032359] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.036075] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 036a2dfc-615d-410a-8a3f-32de621879c2/036a2dfc-615d-410a-8a3f-32de621879c2.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.036075] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea5cee62-11d9-4465-9bdd-70a61dde4fdc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.041593] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 763.041593] env[61972]: value = "task-1389127" [ 763.041593] env[61972]: _type = "Task" [ 763.041593] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.047633] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389127, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.082944] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.083471] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 763.087049] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.972s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.089106] env[61972]: INFO nova.compute.claims [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.551941] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389127, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.589330] env[61972]: DEBUG nova.compute.utils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 763.591321] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Allocating IP information in the background. 
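The compute_resources acquire/release pairs above are oslo.concurrency named locks serializing the resource tracker on this host, which is why one claim reports having waited 15.972s while another build held the lock. A minimal sketch of the same named-lock pattern (only the decorator usage mirrors what the log shows; the claim body is illustrative):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Only one claim may touch the host's tracked resources at a time;
    # concurrent builds queue here and show up as 'waited N s' in the log.
    print(f"claiming {vcpus} vCPU / {memory_mb} MB for {instance_uuid}")

# The m1.nano-sized claim for the instance being built above.
instance_claim('6cda8874-6af5-490a-b9a2-323992265eb4', 1, 192)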
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 763.591619] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 763.665122] env[61972]: DEBUG nova.policy [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c90c43e7f554d259c9f8a65d3e797dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '871c1d0ed74c4c46861d512087263041', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 763.870833] env[61972]: INFO nova.scheduler.client.report [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Deleted allocations for instance 7801858d-bc2a-466e-a6f2-a8c6b6ff4705 [ 764.013766] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Successfully created port: 05eb5f0f-024b-4404-81a8-f5f8246a82ff {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 764.052047] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524344} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.052047] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 036a2dfc-615d-410a-8a3f-32de621879c2/036a2dfc-615d-410a-8a3f-32de621879c2.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.052047] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.052047] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8f1f74c-766d-4989-98cc-18b5d2db268c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.057358] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 764.057358] env[61972]: value = "task-1389128" [ 764.057358] env[61972]: _type = "Task" [ 764.057358] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.064871] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389128, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.095720] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 764.380932] env[61972]: DEBUG oslo_concurrency.lockutils [None req-677fba32-4edb-4df9-8c9e-777e92265c16 tempest-ServersWithSpecificFlavorTestJSON-893421931 tempest-ServersWithSpecificFlavorTestJSON-893421931-project-member] Lock "7801858d-bc2a-466e-a6f2-a8c6b6ff4705" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.245s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.462387] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a2f1e6-8c18-4d46-bc65-23921c8ad104 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.471670] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe698b46-27f0-4d25-aa55-ef55383965d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.514697] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d03fc8-b208-41b1-aad0-c249fdd0952a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.523766] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c810b6a7-7868-4181-be30-d7f8cad1af21 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.537395] env[61972]: DEBUG nova.compute.provider_tree [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.566947] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080853} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.567232] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.567919] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d27e69b-0c18-462a-bf9d-5ae97f8d009d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.587380] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 036a2dfc-615d-410a-8a3f-32de621879c2/036a2dfc-615d-410a-8a3f-32de621879c2.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.587956] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b61a00d-8491-42bc-8196-d99f4f4790ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.611245] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 764.611245] env[61972]: value = "task-1389129" [ 764.611245] env[61972]: _type = "Task" [ 764.611245] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.619183] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389129, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.883563] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 765.040977] env[61972]: DEBUG nova.scheduler.client.report [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 765.108422] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 765.125143] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389129, 'name': ReconfigVM_Task, 'duration_secs': 0.288705} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.125143] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 036a2dfc-615d-410a-8a3f-32de621879c2/036a2dfc-615d-410a-8a3f-32de621879c2.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.125143] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7fe67d9f-109f-409c-b0ad-46db07bc2f0e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.130312] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 765.130312] env[61972]: value = "task-1389130" [ 765.130312] env[61972]: _type = "Task" [ 765.130312] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.141643] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389130, 'name': Rename_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.143686] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 765.144329] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 765.144329] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.144329] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 765.144474] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.144503] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 765.144683] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 765.146036] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 765.146036] env[61972]: DEBUG nova.virt.hardware [None 
req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 765.146036] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 765.146036] env[61972]: DEBUG nova.virt.hardware [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 765.146268] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28942282-3b5c-40ee-b512-cf0e1c06fd1c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.153442] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23352958-95ea-468c-82a7-68cd63f77d46 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.280134] env[61972]: DEBUG nova.compute.manager [req-6fa27b8a-383d-453e-acb4-e0abd36f49ad req-2bb33100-216f-4a65-b6f5-05d5b8afe905 service nova] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Received event network-changed-05eb5f0f-024b-4404-81a8-f5f8246a82ff {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 765.280363] env[61972]: DEBUG nova.compute.manager [req-6fa27b8a-383d-453e-acb4-e0abd36f49ad req-2bb33100-216f-4a65-b6f5-05d5b8afe905 service nova] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Refreshing instance network info cache due to event network-changed-05eb5f0f-024b-4404-81a8-f5f8246a82ff. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 765.280571] env[61972]: DEBUG oslo_concurrency.lockutils [req-6fa27b8a-383d-453e-acb4-e0abd36f49ad req-2bb33100-216f-4a65-b6f5-05d5b8afe905 service nova] Acquiring lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.280716] env[61972]: DEBUG oslo_concurrency.lockutils [req-6fa27b8a-383d-453e-acb4-e0abd36f49ad req-2bb33100-216f-4a65-b6f5-05d5b8afe905 service nova] Acquired lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.280870] env[61972]: DEBUG nova.network.neutron [req-6fa27b8a-383d-453e-acb4-e0abd36f49ad req-2bb33100-216f-4a65-b6f5-05d5b8afe905 service nova] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Refreshing network info cache for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 765.378982] env[61972]: ERROR nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. [ 765.378982] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 765.378982] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 765.378982] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 765.378982] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.378982] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 765.378982] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.378982] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 765.378982] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.378982] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 765.378982] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.378982] env[61972]: ERROR nova.compute.manager raise self.value [ 765.378982] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.378982] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 765.378982] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.378982] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 765.379378] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.379378] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 765.379378] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. [ 765.379378] env[61972]: ERROR nova.compute.manager [ 765.379378] env[61972]: Traceback (most recent call last): [ 765.379378] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 765.379378] env[61972]: listener.cb(fileno) [ 765.379378] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.379378] env[61972]: result = function(*args, **kwargs) [ 765.379378] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.379378] env[61972]: return func(*args, **kwargs) [ 765.379378] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 765.379378] env[61972]: raise e [ 765.379378] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 765.379378] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 765.379378] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.379378] env[61972]: created_port_ids = self._update_ports_for_instance( [ 765.379378] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.379378] env[61972]: with excutils.save_and_reraise_exception(): [ 765.379378] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.379378] env[61972]: self.force_reraise() [ 765.379378] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.379378] env[61972]: raise self.value [ 765.379378] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.379378] env[61972]: updated_port = self._update_port( [ 765.379378] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.379378] env[61972]: _ensure_no_port_binding_failure(port) [ 765.379378] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.379378] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 765.379946] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. [ 765.379946] env[61972]: Removing descriptor: 19 [ 765.379946] env[61972]: ERROR nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. 
[ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Traceback (most recent call last): [ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] yield resources [ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self.driver.spawn(context, instance, image_meta, [ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.379946] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] vm_ref = self.build_virtual_machine(instance, [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] for vif in network_info: [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] return self._sync_wrapper(fn, *args, **kwargs) [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self.wait() [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self[:] = self._gt.wait() [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] return self._exit_event.wait() [ 765.380195] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.380555] env[61972]: ERROR 
nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] result = hub.switch() [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] return self.greenlet.switch() [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] result = function(*args, **kwargs) [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] return func(*args, **kwargs) [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] raise e [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] nwinfo = self.network_api.allocate_for_instance( [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.380555] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] created_port_ids = self._update_ports_for_instance( [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] with excutils.save_and_reraise_exception(): [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self.force_reraise() [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] raise self.value [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] updated_port = self._update_port( [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.380808] 
env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] _ensure_no_port_binding_failure(port) [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.380808] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] raise exception.PortBindingFailed(port_id=port['id']) [ 765.381047] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] nova.exception.PortBindingFailed: Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. [ 765.381047] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] [ 765.381047] env[61972]: INFO nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Terminating instance [ 765.407437] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.549906] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.550504] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 765.556499] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.011s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.556499] env[61972]: DEBUG nova.objects.instance [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61972) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 765.641416] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389130, 'name': Rename_Task, 'duration_secs': 0.132251} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.641416] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 765.641727] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6617dc5a-1dc5-44bd-bf81-eeab40c85c23 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.648026] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 765.648026] env[61972]: value = "task-1389131" [ 765.648026] env[61972]: _type = "Task" [ 765.648026] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.654981] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.803182] env[61972]: DEBUG nova.network.neutron [req-6fa27b8a-383d-453e-acb4-e0abd36f49ad req-2bb33100-216f-4a65-b6f5-05d5b8afe905 service nova] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.884721] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.917424] env[61972]: DEBUG nova.network.neutron [req-6fa27b8a-383d-453e-acb4-e0abd36f49ad req-2bb33100-216f-4a65-b6f5-05d5b8afe905 service nova] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.060836] env[61972]: DEBUG nova.compute.utils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 766.065584] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 766.065788] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 766.123595] env[61972]: DEBUG nova.policy [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cefef67f4ae0451aaa108df20aa7a3db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a685a448ff041db8bc49b4429688e34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 766.160534] env[61972]: DEBUG oslo_vmware.api [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389131, 'name': PowerOnVM_Task, 'duration_secs': 0.440754} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.160983] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 766.161277] env[61972]: INFO nova.compute.manager [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Took 5.30 seconds to spawn the instance on the hypervisor. 
[ 766.161613] env[61972]: DEBUG nova.compute.manager [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 766.162475] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ff9661-56f9-4eb9-837c-37d491bdf11a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.420737] env[61972]: DEBUG oslo_concurrency.lockutils [req-6fa27b8a-383d-453e-acb4-e0abd36f49ad req-2bb33100-216f-4a65-b6f5-05d5b8afe905 service nova] Releasing lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.421164] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquired lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.422021] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.566096] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 766.570042] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f43312bf-c623-4828-83f6-f9167a5eaf8d tempest-ServersAdmin275Test-33558719 tempest-ServersAdmin275Test-33558719-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.571247] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.594s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.683542] env[61972]: INFO nova.compute.manager [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Took 25.96 seconds to build instance. 
[ 766.762205] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Successfully created port: 738272d9-ef01-458b-b987-d513d12f7c81 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.947265] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.185588] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c33653e3-405d-47ab-b794-489f3a800cb2 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "036a2dfc-615d-410a-8a3f-32de621879c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 130.410s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.327573] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.426144] env[61972]: DEBUG nova.compute.manager [req-91c45630-29d6-4697-b1a4-81b11848286c req-b1f08625-d99a-4eff-a41b-4970a9dbc266 service nova] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Received event network-vif-deleted-05eb5f0f-024b-4404-81a8-f5f8246a82ff {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 767.583018] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 767.615255] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 767.615945] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 767.617044] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.617044] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 767.617044] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.617044] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 767.617044] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 767.617478] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 767.617780] env[61972]: DEBUG 
nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 767.618070] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 767.618356] env[61972]: DEBUG nova.virt.hardware [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 767.619598] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52f8c28-98a4-4870-aab2-391ad38a40e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.623292] env[61972]: WARNING nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 9fd9fc35-7105-4941-8e05-cf4e45bb5d29 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 767.623712] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance dc5ef08a-8692-4274-84df-7c2923099249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.623944] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance bc10dded-e669-4fdb-9f5b-cc6abc3a37c7 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.624166] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 036a2dfc-615d-410a-8a3f-32de621879c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.624370] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 6cda8874-6af5-490a-b9a2-323992265eb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.626172] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 5b7223bd-66f3-44ec-b3bc-e9072eca515e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.631998] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbfa064-92b0-41aa-9740-73e4fc3a01f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.691020] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 767.831258] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Releasing lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.831746] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 767.831946] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.832264] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-663ebfdc-8a08-474b-88f2-3b9a5040558b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.847029] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80282d8b-0539-4c85-8189-f90dfa8217ff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.870103] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6cda8874-6af5-490a-b9a2-323992265eb4 could not be found. 
[ 767.870579] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 767.870932] env[61972]: INFO nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 767.872157] env[61972]: DEBUG oslo.service.loopingcall [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.872538] env[61972]: DEBUG nova.compute.manager [-] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 767.872619] env[61972]: DEBUG nova.network.neutron [-] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.889797] env[61972]: DEBUG nova.network.neutron [-] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.036253] env[61972]: INFO nova.compute.manager [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Rebuilding instance [ 768.084887] env[61972]: DEBUG nova.compute.manager [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 768.089019] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fa32d0-84d6-48cb-9496-c523ad5e5aaf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.127830] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance dab76349-85ba-4513-afa7-d9a33da1b1fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.208152] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.391911] env[61972]: DEBUG nova.network.neutron [-] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.505117] env[61972]: ERROR nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. [ 768.505117] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 768.505117] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 768.505117] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 768.505117] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 768.505117] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 768.505117] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 768.505117] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 768.505117] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.505117] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 768.505117] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.505117] env[61972]: ERROR nova.compute.manager raise self.value [ 768.505117] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 768.505117] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 768.505117] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.505117] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 768.505620] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.505620] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 768.505620] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. 
[ 768.505620] env[61972]: ERROR nova.compute.manager [ 768.505620] env[61972]: Traceback (most recent call last): [ 768.505620] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 768.505620] env[61972]: listener.cb(fileno) [ 768.505620] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 768.505620] env[61972]: result = function(*args, **kwargs) [ 768.505620] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 768.505620] env[61972]: return func(*args, **kwargs) [ 768.505620] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 768.505620] env[61972]: raise e [ 768.505620] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 768.505620] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 768.505620] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 768.505620] env[61972]: created_port_ids = self._update_ports_for_instance( [ 768.505620] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 768.505620] env[61972]: with excutils.save_and_reraise_exception(): [ 768.505620] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.505620] env[61972]: self.force_reraise() [ 768.505620] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.505620] env[61972]: raise self.value [ 768.505620] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 768.505620] env[61972]: updated_port = self._update_port( [ 768.505620] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.505620] env[61972]: _ensure_no_port_binding_failure(port) [ 768.505620] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.505620] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 768.506488] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. [ 768.506488] env[61972]: Removing descriptor: 19 [ 768.506488] env[61972]: ERROR nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. 
[ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Traceback (most recent call last): [ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] yield resources [ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self.driver.spawn(context, instance, image_meta, [ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 768.506488] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] vm_ref = self.build_virtual_machine(instance, [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] vif_infos = vmwarevif.get_vif_info(self._session, [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] for vif in network_info: [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] return self._sync_wrapper(fn, *args, **kwargs) [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self.wait() [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self[:] = self._gt.wait() [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] return self._exit_event.wait() [ 768.506810] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 768.507581] env[61972]: ERROR 
nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] result = hub.switch() [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] return self.greenlet.switch() [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] result = function(*args, **kwargs) [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] return func(*args, **kwargs) [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] raise e [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] nwinfo = self.network_api.allocate_for_instance( [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 768.507581] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] created_port_ids = self._update_ports_for_instance( [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] with excutils.save_and_reraise_exception(): [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self.force_reraise() [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] raise self.value [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] updated_port = self._update_port( [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.507935] 
env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] _ensure_no_port_binding_failure(port) [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.507935] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] raise exception.PortBindingFailed(port_id=port['id']) [ 768.508273] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] nova.exception.PortBindingFailed: Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. [ 768.508273] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] [ 768.508273] env[61972]: INFO nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Terminating instance [ 768.632511] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance b986f147-a782-467c-92d1-bffb6a50c450 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.897956] env[61972]: INFO nova.compute.manager [-] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Took 1.02 seconds to deallocate network for instance. [ 768.899178] env[61972]: DEBUG nova.compute.claims [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 768.899485] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.011971] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.011971] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.011971] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Building network info cache for instance 
{{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.101634] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 769.101960] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-380d4d5e-ff0e-466b-9de7-c3cebafd5a46 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.109848] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 769.109848] env[61972]: value = "task-1389132" [ 769.109848] env[61972]: _type = "Task" [ 769.109848] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.117800] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389132, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.135829] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 21c83740-56b6-4cc8-b97b-2b7a00380b91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.454713] env[61972]: DEBUG nova.compute.manager [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Received event network-changed-738272d9-ef01-458b-b987-d513d12f7c81 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 769.454916] env[61972]: DEBUG nova.compute.manager [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Refreshing instance network info cache due to event network-changed-738272d9-ef01-458b-b987-d513d12f7c81. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 769.455294] env[61972]: DEBUG oslo_concurrency.lockutils [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] Acquiring lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.531268] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.621878] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389132, 'name': PowerOffVM_Task, 'duration_secs': 0.118297} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.622172] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 769.622539] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 769.623363] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59697a78-9a7e-40af-aa6c-7711e950428c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.629907] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 769.631901] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf0f42c2-49fc-4084-8f9d-bae0185b59aa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.639552] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 479b311e-e027-4724-bd8b-dffa8903b538 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.649427] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.656566] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 769.656850] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 769.661090] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Deleting the datastore file [datastore2] 036a2dfc-615d-410a-8a3f-32de621879c2 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 769.662027] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-229be031-c5fd-4146-bd13-9c69a821da9f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.669148] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 769.669148] env[61972]: value = "task-1389134" [ 769.669148] env[61972]: _type = "Task" [ 769.669148] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.681129] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389134, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.143061] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 47bd9677-375a-413b-a5c5-989d491adec9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.152527] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.152975] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 770.153451] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 770.154134] env[61972]: DEBUG oslo_concurrency.lockutils [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] Acquired lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.154338] env[61972]: DEBUG nova.network.neutron [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Refreshing network info cache for port 738272d9-ef01-458b-b987-d513d12f7c81 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 770.155710] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bfa753f-39c6-4419-a985-19c446376bdf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.165642] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea6b10a-8824-4343-bb83-2469ef11ff2f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.184351] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091436} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.184600] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.184774] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 770.184944] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 770.191770] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5b7223bd-66f3-44ec-b3bc-e9072eca515e could not be found. [ 770.191982] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 770.192175] env[61972]: INFO nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 770.192434] env[61972]: DEBUG oslo.service.loopingcall [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.192876] env[61972]: DEBUG nova.compute.manager [-] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 770.193041] env[61972]: DEBUG nova.network.neutron [-] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 770.218247] env[61972]: DEBUG nova.network.neutron [-] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.644945] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance a5a78743-e155-4ded-854e-822976192097 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.679186] env[61972]: DEBUG nova.network.neutron [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.721390] env[61972]: DEBUG nova.network.neutron [-] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.833971] env[61972]: DEBUG nova.network.neutron [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.149259] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 67ecabfd-4efc-4e1c-a708-107197cfd018 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.224509] env[61972]: INFO nova.compute.manager [-] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Took 1.03 seconds to deallocate network for instance. 
[ 771.227071] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 771.227184] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 771.227330] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.227539] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 771.227677] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.227905] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 771.228198] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 771.228363] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 771.228530] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Got 1 possible 
topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 771.228688] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 771.228857] env[61972]: DEBUG nova.virt.hardware [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 771.231109] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2898ddb5-48c1-46a2-a760-b5b37bd234bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.234397] env[61972]: DEBUG nova.compute.claims [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 771.234535] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.240489] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4408bf51-d701-40d2-a563-6e0fb2e67d0a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.254561] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 771.260298] env[61972]: DEBUG oslo.service.loopingcall [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 771.260555] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 771.260760] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-259ad2df-e61c-4f8b-89a5-7e10cd03d58f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.280618] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 771.280618] env[61972]: value = "task-1389135" [ 771.280618] env[61972]: _type = "Task" [ 771.280618] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.289050] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389135, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.336343] env[61972]: DEBUG oslo_concurrency.lockutils [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] Releasing lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.336644] env[61972]: DEBUG nova.compute.manager [req-f742f1b8-5e43-4fd2-ac9a-e2f46f6015e9 req-5bb4e175-258f-4efe-82ee-f8021bde1490 service nova] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Received event network-vif-deleted-738272d9-ef01-458b-b987-d513d12f7c81 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 771.652762] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 49cd5798-1f76-4690-bea7-cebd98a84f5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.791289] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389135, 'name': CreateVM_Task, 'duration_secs': 0.265824} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.791621] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 771.792143] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.792428] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.792871] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 771.793221] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a5b8124-2e5a-4299-84b1-00cc4eb8f6b3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.797837] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee 
tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 771.797837] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f6abaf-0c1e-f8b7-f711-83875d196bd1" [ 771.797837] env[61972]: _type = "Task" [ 771.797837] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.807181] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f6abaf-0c1e-f8b7-f711-83875d196bd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.156739] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 8a9a51b5-a8a5-4bda-a36c-682758f50745 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.312509] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f6abaf-0c1e-f8b7-f711-83875d196bd1, 'name': SearchDatastore_Task, 'duration_secs': 0.009729} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.312509] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.312509] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 772.312509] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.312878] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.313222] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 772.313786] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56005f0b-63ee-4518-8087-eb4f26c27e60 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.325142] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 772.325142] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 772.325142] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f4d4f8f-914d-4532-9ebd-9cf8b966f21a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.333036] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 772.333036] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]523b2b0a-5038-c6dc-4a11-ceca489da2a5" [ 772.333036] env[61972]: _type = "Task" [ 772.333036] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.341490] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523b2b0a-5038-c6dc-4a11-ceca489da2a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.667092] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.843152] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523b2b0a-5038-c6dc-4a11-ceca489da2a5, 'name': SearchDatastore_Task, 'duration_secs': 0.008551} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.843989] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dce5beb9-67ee-469d-8aba-2bdc12605c4d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.849278] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 772.849278] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52910c24-242c-2c50-9cee-67fa454c88d2" [ 772.849278] env[61972]: _type = "Task" [ 772.849278] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.856786] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52910c24-242c-2c50-9cee-67fa454c88d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.170355] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 0cd09167-2c2f-4cad-b26d-35aa208fbf79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.364719] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52910c24-242c-2c50-9cee-67fa454c88d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011653} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.364891] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.365950] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 036a2dfc-615d-410a-8a3f-32de621879c2/036a2dfc-615d-410a-8a3f-32de621879c2.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 773.365950] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5081000-8091-47a7-aa6a-8b300dfbe3b2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.373638] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 773.373638] env[61972]: value = "task-1389136" [ 773.373638] env[61972]: _type = "Task" [ 773.373638] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.386160] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.673569] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance a77d41aa-13ba-4d26-b5fd-4928891948ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.884025] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389136, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478103} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.884180] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 036a2dfc-615d-410a-8a3f-32de621879c2/036a2dfc-615d-410a-8a3f-32de621879c2.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.884458] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.884834] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1bd99731-d870-47cb-990e-a0676a63c586 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.890655] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 773.890655] env[61972]: value = "task-1389137" [ 773.890655] env[61972]: _type = "Task" [ 773.890655] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.897905] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.180010] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.400759] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080168} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.401041] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.401807] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3839aa-e75d-43af-a820-8da6c78d4316 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.420701] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 036a2dfc-615d-410a-8a3f-32de621879c2/036a2dfc-615d-410a-8a3f-32de621879c2.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.420977] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bd4bbc0-b442-4497-b7e4-4914f0635c9d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.442872] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 774.442872] env[61972]: value = "task-1389138" [ 774.442872] env[61972]: _type = "Task" [ 774.442872] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.450979] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389138, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.685211] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 89cbc6ec-7546-443c-9abb-47940d223daa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.702780] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "21440243-458c-4640-b0ba-8f3b8b1b0720" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.703016] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "21440243-458c-4640-b0ba-8f3b8b1b0720" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.952480] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389138, 'name': ReconfigVM_Task, 'duration_secs': 0.447354} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.952763] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 036a2dfc-615d-410a-8a3f-32de621879c2/036a2dfc-615d-410a-8a3f-32de621879c2.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.953376] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e8f0a28-6a13-40fe-be3b-2c9b7abfd6bb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.959887] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 774.959887] env[61972]: value = "task-1389139" [ 774.959887] env[61972]: _type = "Task" [ 774.959887] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.968687] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389139, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.188139] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance caad50a8-e0ad-4ca9-b391-691ead1756f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 775.470663] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389139, 'name': Rename_Task, 'duration_secs': 0.123521} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.470962] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.471208] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b187e08-25aa-4885-9476-d7c6bfe5b421 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.477302] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 775.477302] env[61972]: value = "task-1389140" [ 775.477302] env[61972]: _type = "Task" [ 775.477302] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.484344] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.691669] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 3d424523-b45d-4174-ac7a-08fd653e314f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 775.987752] env[61972]: DEBUG oslo_vmware.api [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389140, 'name': PowerOnVM_Task, 'duration_secs': 0.408035} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.987958] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 775.988141] env[61972]: DEBUG nova.compute.manager [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 775.988903] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35aab056-fbe8-4481-b6ee-d9a4139398f3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.194888] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance e2b6dd4e-b639-4553-a45f-87c155506ea3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 776.503894] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.697972] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 94bd64b9-3d20-4631-baed-4500f9beb9c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 776.991938] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "036a2dfc-615d-410a-8a3f-32de621879c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.992355] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "036a2dfc-615d-410a-8a3f-32de621879c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.992592] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "036a2dfc-615d-410a-8a3f-32de621879c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.992775] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "036a2dfc-615d-410a-8a3f-32de621879c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.992946] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "036a2dfc-615d-410a-8a3f-32de621879c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.995782] env[61972]: INFO nova.compute.manager [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Terminating instance [ 777.200651] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 667aff7f-57d5-4133-934d-386602a866f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
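The "Acquiring lock" / "acquired ... waited" / "released ... held" triples around the instance UUID and around "compute_resources" are emitted by oslo.concurrency's lockutils wrapper. A minimal sketch of the two usual forms, with a placeholder lock name and body; this is the generic lockutils API, not the nova helper that wraps it.

    # Illustrative oslo_concurrency.lockutils usage (placeholder lock name and body).
    from oslo_concurrency import lockutils

    @lockutils.synchronized('036a2dfc-615d-410a-8a3f-32de621879c2')
    def do_terminate_instance():
        # Only one thread/greenthread holds this named lock at a time; lockutils
        # logs the acquire/release timings seen in the entries above.
        pass

    # Equivalent context-manager form:
    with lockutils.lock('036a2dfc-615d-410a-8a3f-32de621879c2'):
        pass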
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 777.201059] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 777.201059] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 777.499071] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "refresh_cache-036a2dfc-615d-410a-8a3f-32de621879c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.499267] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquired lock "refresh_cache-036a2dfc-615d-410a-8a3f-32de621879c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.499441] env[61972]: DEBUG nova.network.neutron [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.501098] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd41009-2bd2-4d19-9ca8-ede9a277fae3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.509745] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62a4b86-d269-428a-a286-65206d9d34c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.271532] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ba8c3c-5d0a-440a-8416-9897243ef725 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.279812] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f7a96c-cbee-4a4b-a102-39143bc8c136 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.293184] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.295033] env[61972]: DEBUG nova.network.neutron [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Instance cache missing network info. 
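The final resource view is consistent with the five skipped allocations listed by the same periodic task: each holds 1 VCPU and 192 MB, and used_ram additionally counts the 512 MB that the inventory reserves for the host (the 'reserved' value on the MEMORY_MB inventory further down). A quick check with the numbers copied from the log:

    # Sanity check of the "Final resource view" figures (values taken from the log).
    reserved_host_memory_mb = 512            # MEMORY_MB 'reserved' in the inventory
    allocations = 5                          # instances with a 192 MB / 1 VCPU claim
    per_instance_mb, per_instance_vcpu = 192, 1

    used_ram = reserved_host_memory_mb + allocations * per_instance_mb
    used_vcpus = allocations * per_instance_vcpu
    print(used_ram, used_vcpus)              # 1472 5, matching used_ram=1472MB, used_vcpus=5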
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.366858] env[61972]: DEBUG nova.network.neutron [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.798780] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 778.869085] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Releasing lock "refresh_cache-036a2dfc-615d-410a-8a3f-32de621879c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.869522] env[61972]: DEBUG nova.compute.manager [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 778.869711] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 778.870844] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15e3fe8-54c7-4338-a733-a4d37038ad3a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.878760] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 778.879042] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82ef5e24-eba7-4acb-a433-d7ed06b55771 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.885115] env[61972]: DEBUG oslo_vmware.api [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 778.885115] env[61972]: value = "task-1389141" [ 778.885115] env[61972]: _type = "Task" [ 778.885115] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.892405] env[61972]: DEBUG oslo_vmware.api [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389141, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.303496] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 779.303838] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.733s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.304130] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.398s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.307214] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.307367] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Cleaning up deleted instances {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11557}} [ 779.395400] env[61972]: DEBUG oslo_vmware.api [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389141, 'name': PowerOffVM_Task, 'duration_secs': 0.110985} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.395791] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.395875] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 779.396136] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfee3ab1-4ab6-4e9c-9ea8-e8325402712a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.420516] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 779.420695] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 779.420912] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Deleting the datastore file [datastore1] 036a2dfc-615d-410a-8a3f-32de621879c2 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 779.421191] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e65464cf-b8a7-416b-8c4d-5d6fc3c63498 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.427770] env[61972]: DEBUG oslo_vmware.api [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for the task: (returnval){ [ 779.427770] env[61972]: value = "task-1389143" [ 779.427770] env[61972]: _type = "Task" [ 779.427770] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.435097] env[61972]: DEBUG oslo_vmware.api [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389143, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.812938] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] There are 1 instances to clean {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11566}} [ 779.813237] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 72d434a7-ea70-4594-971f-7eec8ebea153] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 779.900533] env[61972]: DEBUG nova.scheduler.client.report [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Refreshing inventories for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 779.917049] env[61972]: DEBUG nova.scheduler.client.report [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Updating ProviderTree inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 779.917276] env[61972]: DEBUG nova.compute.provider_tree [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 779.928276] env[61972]: DEBUG nova.scheduler.client.report [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Refreshing aggregate associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, aggregates: None {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 779.939190] env[61972]: DEBUG oslo_vmware.api [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Task: {'id': task-1389143, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116577} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.939511] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 779.939619] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 779.939781] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 779.939953] env[61972]: INFO nova.compute.manager [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Took 1.07 seconds to destroy the instance on the hypervisor. [ 779.940201] env[61972]: DEBUG oslo.service.loopingcall [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.940411] env[61972]: DEBUG nova.compute.manager [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 779.940492] env[61972]: DEBUG nova.network.neutron [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.947741] env[61972]: DEBUG nova.scheduler.client.report [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Refreshing trait associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 779.954682] env[61972]: DEBUG nova.network.neutron [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Instance cache missing network info. 
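The terminate path for instance 036a2dfc-615d-410a-8a3f-32de621879c2 runs in the order the entries show: power the VM off, unregister it from vCenter, delete its directory on the datastore, then hand network cleanup to neutron. A condensed sketch of that sequence via oslo_vmware; the session details and the VM and datacenter morefs are placeholders, and error handling plus the neutron call are omitted.

    # Condensed destroy sequence (placeholder session details and morefs).
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')     # placeholder
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')    # placeholder

    # 1. Power off the VM (task-based).
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))

    # 2. Unregister the VM from vCenter (plain call, no task).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance directory from the datastore (task-based).
    file_mgr = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_mgr,
            name='[datastore1] 036a2dfc-615d-410a-8a3f-32de621879c2',
            datacenter=dc_ref))

    # 4. Network deallocation is then delegated to neutron (not shown here).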
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.208541] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aab99a6-e25b-4d92-b4b6-be90561433a3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.216495] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647d370e-1e94-4eca-96ea-b8ffa7603e19 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.245317] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ec3a72-1dc2-4e6c-8a39-b7b216b2981b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.252810] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17376f3e-085d-4670-83b3-d0f4a0a67c31 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.265614] env[61972]: DEBUG nova.compute.provider_tree [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.316734] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 780.316999] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Cleaning up deleted instances with incomplete migration {{(pid=61972) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11595}} [ 780.457417] env[61972]: DEBUG nova.network.neutron [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.769291] env[61972]: DEBUG nova.scheduler.client.report [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 780.820258] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 780.960053] env[61972]: INFO nova.compute.manager [-] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Took 
1.02 seconds to deallocate network for instance. [ 781.274638] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.970s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.275691] env[61972]: ERROR nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] Traceback (most recent call last): [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self.driver.spawn(context, instance, image_meta, [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] vm_ref = self.build_virtual_machine(instance, [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] vif_infos = vmwarevif.get_vif_info(self._session, [ 781.275691] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] for vif in network_info: [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] return self._sync_wrapper(fn, *args, **kwargs) [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self.wait() [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: 
dc5ef08a-8692-4274-84df-7c2923099249] self[:] = self._gt.wait() [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] return self._exit_event.wait() [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] result = hub.switch() [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 781.276044] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] return self.greenlet.switch() [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] result = function(*args, **kwargs) [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] return func(*args, **kwargs) [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] raise e [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] nwinfo = self.network_api.allocate_for_instance( [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] created_port_ids = self._update_ports_for_instance( [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] with excutils.save_and_reraise_exception(): [ 781.276780] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] self.force_reraise() [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] raise self.value [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] updated_port = self._update_port( [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] _ensure_no_port_binding_failure(port) [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] raise exception.PortBindingFailed(port_id=port['id']) [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] nova.exception.PortBindingFailed: Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. [ 781.277077] env[61972]: ERROR nova.compute.manager [instance: dc5ef08a-8692-4274-84df-7c2923099249] [ 781.277320] env[61972]: DEBUG nova.compute.utils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 781.278377] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.564s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.279717] env[61972]: INFO nova.compute.claims [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.284828] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Build of instance dc5ef08a-8692-4274-84df-7c2923099249 was re-scheduled: Binding failed for port c14c4024-5e3d-49f9-a5c5-db5803e5b05f, please check neutron logs for more information. 
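The PortBindingFailed traceback bottoms out in _ensure_no_port_binding_failure: after updating the port, nova inspects the binding that neutron returned, and if neutron flagged the port as binding_failed it raises PortBindingFailed, which aborts the claim and, as the following entries show, re-schedules the build. A simplified, self-contained version of that check:

    # Simplified stand-in for the check at the bottom of the traceback; the
    # exception class is stubbed so the snippet runs on its own.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind with vif_type "binding_failed".
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    ensure_no_port_binding_failure(
        {'id': 'c14c4024-5e3d-49f9-a5c5-db5803e5b05f',
         'binding:vif_type': 'binding_failed'})   # raises PortBindingFailed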
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 781.284828] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 781.284828] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Acquiring lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.284828] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Acquired lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.285043] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 781.466912] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.805685] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.941620] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.444358] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Releasing lock "refresh_cache-dc5ef08a-8692-4274-84df-7c2923099249" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.444358] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 782.444358] env[61972]: DEBUG nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 782.444358] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 782.458195] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.588070] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba59ba4-d320-462e-bf45-ecdf1bb74e66 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.595581] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885d3cd1-15f0-455e-9d15-82dce2f67d09 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.624438] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58fb406-7d46-42b8-a92c-88510fc501e0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.631036] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f096f2e-8979-4428-a281-f27bd2a406e7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.644572] env[61972]: DEBUG nova.compute.provider_tree [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.960859] env[61972]: DEBUG nova.network.neutron [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.147607] env[61972]: DEBUG nova.scheduler.client.report [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 783.465415] env[61972]: INFO nova.compute.manager [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] [instance: dc5ef08a-8692-4274-84df-7c2923099249] Took 1.02 seconds to deallocate network for instance. [ 783.655021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.655021] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 783.656046] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.214s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.656769] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.658851] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.029s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.660475] env[61972]: INFO nova.compute.claims [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.684868] env[61972]: INFO nova.scheduler.client.report [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Deleted allocations for instance 9fd9fc35-7105-4941-8e05-cf4e45bb5d29 [ 784.165418] env[61972]: DEBUG nova.compute.utils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 784.168681] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 784.168886] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.192254] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cf771c6-a7c3-45de-bca1-31beb220b9d3 tempest-ServersAdmin275Test-102420746 tempest-ServersAdmin275Test-102420746-project-member] Lock "9fd9fc35-7105-4941-8e05-cf4e45bb5d29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.061s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.379906] env[61972]: DEBUG nova.policy [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c4d2671dab9481fb94161b1188de693', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6c59f1b269040d5a4605fa7a178cdf8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 784.504247] env[61972]: INFO nova.scheduler.client.report [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Deleted allocations for instance dc5ef08a-8692-4274-84df-7c2923099249 [ 784.669608] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 784.698761] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Successfully created port: a9245a41-0f57-44b9-a0e5-84924344fcfb {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.018388] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0539a42f-e488-41ee-9d69-69b97148ead9 tempest-ServerActionsTestOtherB-2123690486 tempest-ServerActionsTestOtherB-2123690486-project-member] Lock "dc5ef08a-8692-4274-84df-7c2923099249" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 171.192s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.026343] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9561e22e-e4c8-4545-9b6d-544bf55c29c7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.040258] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be48c6fc-3967-4f19-a3b6-ac1320c799e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.071637] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3f616d-be12-4b32-9b06-0de6dec40b5c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.079546] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbb3e4e-5ed2-4066-9659-5557a105d70f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.094455] env[61972]: DEBUG nova.compute.provider_tree [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.519556] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 785.599491] env[61972]: DEBUG nova.scheduler.client.report [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 785.635496] env[61972]: DEBUG nova.compute.manager [req-8078b971-7648-47fd-82af-eaaec595cb5f req-075eb23a-ce24-4412-a20e-e3d8441c470c service nova] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Received event network-changed-a9245a41-0f57-44b9-a0e5-84924344fcfb {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 785.635698] env[61972]: DEBUG nova.compute.manager [req-8078b971-7648-47fd-82af-eaaec595cb5f req-075eb23a-ce24-4412-a20e-e3d8441c470c service nova] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Refreshing instance network info cache due to event network-changed-a9245a41-0f57-44b9-a0e5-84924344fcfb. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 785.635949] env[61972]: DEBUG oslo_concurrency.lockutils [req-8078b971-7648-47fd-82af-eaaec595cb5f req-075eb23a-ce24-4412-a20e-e3d8441c470c service nova] Acquiring lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.636110] env[61972]: DEBUG oslo_concurrency.lockutils [req-8078b971-7648-47fd-82af-eaaec595cb5f req-075eb23a-ce24-4412-a20e-e3d8441c470c service nova] Acquired lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.636552] env[61972]: DEBUG nova.network.neutron [req-8078b971-7648-47fd-82af-eaaec595cb5f req-075eb23a-ce24-4412-a20e-e3d8441c470c service nova] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Refreshing network info cache for port a9245a41-0f57-44b9-a0e5-84924344fcfb {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.682413] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 785.712756] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 785.713007] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 785.713198] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.713522] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 785.713837] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.716402] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 785.716402] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 785.716402] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
785.716402] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 785.716402] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 785.716549] env[61972]: DEBUG nova.virt.hardware [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 785.716549] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0eb237-227f-4d84-88c4-a371f8680cfb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.727028] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005b5062-db71-4d6b-8007-71dec7c1ac42 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.845187] env[61972]: ERROR nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. 
[ 785.845187] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 785.845187] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 785.845187] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 785.845187] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 785.845187] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 785.845187] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 785.845187] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 785.845187] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 785.845187] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 785.845187] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 785.845187] env[61972]: ERROR nova.compute.manager raise self.value [ 785.845187] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 785.845187] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 785.845187] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 785.845187] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 785.845663] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 785.845663] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 785.845663] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. 
[ 785.845663] env[61972]: ERROR nova.compute.manager [ 785.845663] env[61972]: Traceback (most recent call last): [ 785.845663] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 785.845663] env[61972]: listener.cb(fileno) [ 785.845663] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 785.845663] env[61972]: result = function(*args, **kwargs) [ 785.845663] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 785.845663] env[61972]: return func(*args, **kwargs) [ 785.845663] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 785.845663] env[61972]: raise e [ 785.845663] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 785.845663] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 785.845663] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 785.845663] env[61972]: created_port_ids = self._update_ports_for_instance( [ 785.845663] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 785.845663] env[61972]: with excutils.save_and_reraise_exception(): [ 785.845663] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 785.845663] env[61972]: self.force_reraise() [ 785.845663] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 785.845663] env[61972]: raise self.value [ 785.845663] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 785.845663] env[61972]: updated_port = self._update_port( [ 785.845663] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 785.845663] env[61972]: _ensure_no_port_binding_failure(port) [ 785.845663] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 785.845663] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 785.846475] env[61972]: nova.exception.PortBindingFailed: Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. [ 785.846475] env[61972]: Removing descriptor: 19 [ 785.846475] env[61972]: ERROR nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. 
[ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Traceback (most recent call last): [ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] yield resources [ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self.driver.spawn(context, instance, image_meta, [ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 785.846475] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] vm_ref = self.build_virtual_machine(instance, [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] for vif in network_info: [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] return self._sync_wrapper(fn, *args, **kwargs) [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self.wait() [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self[:] = self._gt.wait() [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] return self._exit_event.wait() [ 785.846797] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 785.847149] env[61972]: ERROR 
nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] result = hub.switch() [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] return self.greenlet.switch() [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] result = function(*args, **kwargs) [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] return func(*args, **kwargs) [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] raise e [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] nwinfo = self.network_api.allocate_for_instance( [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 785.847149] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] created_port_ids = self._update_ports_for_instance( [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] with excutils.save_and_reraise_exception(): [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self.force_reraise() [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] raise self.value [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] updated_port = self._update_port( [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 785.847709] 
env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] _ensure_no_port_binding_failure(port) [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 785.847709] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] raise exception.PortBindingFailed(port_id=port['id']) [ 785.847969] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] nova.exception.PortBindingFailed: Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. [ 785.847969] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] [ 785.847969] env[61972]: INFO nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Terminating instance [ 786.054072] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.104940] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.105493] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 786.109445] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.483s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.110552] env[61972]: INFO nova.compute.claims [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.158876] env[61972]: DEBUG nova.network.neutron [req-8078b971-7648-47fd-82af-eaaec595cb5f req-075eb23a-ce24-4412-a20e-e3d8441c470c service nova] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.314993] env[61972]: DEBUG nova.network.neutron [req-8078b971-7648-47fd-82af-eaaec595cb5f req-075eb23a-ce24-4412-a20e-e3d8441c470c service nova] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.351334] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.615349] env[61972]: DEBUG nova.compute.utils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 786.619381] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 786.619594] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 786.695742] env[61972]: DEBUG nova.policy [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11e1217449554a459d4f5cb72bfc578f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b80714a72c14aef842c7b02001edd92', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 786.819888] env[61972]: DEBUG oslo_concurrency.lockutils [req-8078b971-7648-47fd-82af-eaaec595cb5f req-075eb23a-ce24-4412-a20e-e3d8441c470c service nova] Releasing lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.819888] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquired lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.819888] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 
dab76349-85ba-4513-afa7-d9a33da1b1fe] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.049935] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Successfully created port: 8f874ab6-1327-43ae-b5c7-23a5820b18c1 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 787.124168] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 787.368022] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.553522] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88f5d68-646b-454f-b6f5-3d04a6e2fbb8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.570667] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b23d668-051c-4dde-8c68-c0fb33a9ee0e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.635069] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58632a3-8969-436a-80ed-8f39fdda5e2c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.642481] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.650952] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e599e5f-8fb7-486c-bd5f-9e64b2baa480 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.676377] env[61972]: DEBUG nova.compute.provider_tree [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.891766] env[61972]: DEBUG nova.compute.manager [req-a80cd41f-bd6b-4f56-bca7-8715f92506bd req-858b0978-b025-4892-89be-2a1edd71dc9b service nova] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Received event network-vif-deleted-a9245a41-0f57-44b9-a0e5-84924344fcfb {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 
788.135406] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 788.148561] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Releasing lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.148816] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 788.149027] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.150383] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a16522ab-fa44-4ac3-83d4-53a5e1e1329c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.163747] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa93935-28b3-41a5-982e-eb3fc6e200fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.177605] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 788.177761] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 788.177979] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 
tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 788.178242] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 788.178443] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 788.178647] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 788.178907] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 788.179150] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 788.179381] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 788.179598] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 788.180224] env[61972]: DEBUG nova.virt.hardware [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 788.180746] env[61972]: DEBUG nova.scheduler.client.report [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 788.185948] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1924bcca-b9eb-4c30-9d54-0a040983575e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.193987] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3556a5-64ff-4121-8cd3-8a872c77663b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.204956] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dab76349-85ba-4513-afa7-d9a33da1b1fe could not be found. [ 788.204956] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.204956] env[61972]: INFO nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Took 0.06 seconds to destroy the instance on the hypervisor. [ 788.204956] env[61972]: DEBUG oslo.service.loopingcall [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.205820] env[61972]: DEBUG nova.compute.manager [-] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 788.206034] env[61972]: DEBUG nova.network.neutron [-] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.244490] env[61972]: DEBUG nova.network.neutron [-] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.689359] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.689977] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 788.692871] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.251s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.746956] env[61972]: DEBUG nova.network.neutron [-] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.880867] env[61972]: ERROR nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. 
[ 788.880867] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 788.880867] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 788.880867] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 788.880867] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.880867] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 788.880867] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.880867] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 788.880867] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.880867] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 788.880867] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.880867] env[61972]: ERROR nova.compute.manager raise self.value [ 788.880867] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.880867] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 788.880867] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.880867] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 788.881338] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 788.881338] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 788.881338] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. 
[ 788.881338] env[61972]: ERROR nova.compute.manager [ 788.881338] env[61972]: Traceback (most recent call last): [ 788.881338] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 788.881338] env[61972]: listener.cb(fileno) [ 788.881338] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.881338] env[61972]: result = function(*args, **kwargs) [ 788.881338] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.881338] env[61972]: return func(*args, **kwargs) [ 788.881338] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 788.881338] env[61972]: raise e [ 788.881338] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 788.881338] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 788.881338] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.881338] env[61972]: created_port_ids = self._update_ports_for_instance( [ 788.881338] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.881338] env[61972]: with excutils.save_and_reraise_exception(): [ 788.881338] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.881338] env[61972]: self.force_reraise() [ 788.881338] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.881338] env[61972]: raise self.value [ 788.881338] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.881338] env[61972]: updated_port = self._update_port( [ 788.881338] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.881338] env[61972]: _ensure_no_port_binding_failure(port) [ 788.881338] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 788.881338] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 788.882205] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. [ 788.882205] env[61972]: Removing descriptor: 19 [ 788.882205] env[61972]: ERROR nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. 
[ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] Traceback (most recent call last): [ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] yield resources [ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self.driver.spawn(context, instance, image_meta, [ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self._vmops.spawn(context, instance, image_meta, injected_files, [ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 788.882205] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] vm_ref = self.build_virtual_machine(instance, [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] vif_infos = vmwarevif.get_vif_info(self._session, [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] for vif in network_info: [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] return self._sync_wrapper(fn, *args, **kwargs) [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self.wait() [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self[:] = self._gt.wait() [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] return self._exit_event.wait() [ 788.883081] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 788.883540] env[61972]: ERROR 
nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] result = hub.switch() [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] return self.greenlet.switch() [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] result = function(*args, **kwargs) [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] return func(*args, **kwargs) [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] raise e [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] nwinfo = self.network_api.allocate_for_instance( [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.883540] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] created_port_ids = self._update_ports_for_instance( [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] with excutils.save_and_reraise_exception(): [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self.force_reraise() [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] raise self.value [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] updated_port = self._update_port( [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.884212] 
env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] _ensure_no_port_binding_failure(port) [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 788.884212] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] raise exception.PortBindingFailed(port_id=port['id']) [ 788.884559] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] nova.exception.PortBindingFailed: Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. [ 788.884559] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] [ 788.884559] env[61972]: INFO nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Terminating instance [ 789.197309] env[61972]: DEBUG nova.compute.utils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 789.202892] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 789.203250] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 789.241683] env[61972]: DEBUG nova.policy [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7412ca04d3de45e39ef81f5e1b97e1d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3d04867a4e544688e1c592a64b3b41d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 789.248669] env[61972]: INFO nova.compute.manager [-] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Took 1.04 seconds to deallocate network for instance. 
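Every PortBindingFailed traceback in this run bottoms out in _ensure_no_port_binding_failure at nova/network/neutron.py:294. A minimal, self-contained sketch of that check, paraphrased from the frames above; the 'binding_failed' sentinel and the binding:vif_type field name are assumptions about the Neutron port dict, not quoted from this log:

# Sketch only: approximates the guard hit at neutron.py:294 in the tracebacks
# above. The field name and sentinel value are assumptions.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)


def _ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding on the port itself; Nova turns that
    # into the exception that aborts _build_and_run_instance.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


port = {'id': '8f874ab6-1327-43ae-b5c7-23a5820b18c1',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
_ensure_no_port_binding_failure(port)  # raises, matching the message above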
[ 789.253847] env[61972]: DEBUG nova.compute.claims [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 789.254082] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.285956] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.286440] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.387751] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.387951] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquired lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.388179] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.516586] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307efb10-489a-47e4-aaf8-b71ae95bbced {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.525235] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c51b84-60a1-4eba-8f20-5091f6616371 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.555170] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 
tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Successfully created port: 3f5337a6-a41f-4b7e-b0fb-69b42d57c945 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.557411] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c924a5-5cb3-4667-b9e2-2f5ba990ef4b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.565040] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19f83f5-894d-404b-90a9-b20c35a0e4a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.578587] env[61972]: DEBUG nova.compute.provider_tree [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.704199] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 789.925349] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.995348] env[61972]: DEBUG nova.compute.manager [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Received event network-changed-8f874ab6-1327-43ae-b5c7-23a5820b18c1 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 789.995348] env[61972]: DEBUG nova.compute.manager [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Refreshing instance network info cache due to event network-changed-8f874ab6-1327-43ae-b5c7-23a5820b18c1. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 789.995348] env[61972]: DEBUG oslo_concurrency.lockutils [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] Acquiring lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.079686] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.081431] env[61972]: DEBUG nova.scheduler.client.report [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 790.581524] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Releasing lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.581957] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 790.582174] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.582557] env[61972]: DEBUG oslo_concurrency.lockutils [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] Acquired lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.582717] env[61972]: DEBUG nova.network.neutron [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Refreshing network info cache for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.583980] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a24aa46b-c341-4ab1-8965-a421ee3c983d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.587016] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.894s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.587946] env[61972]: ERROR nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. 
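The repeated "Acquiring lock" / "acquired by ... waited" / "released ... held" DEBUG lines around compute_resources and the refresh_cache-* names come from the oslo.concurrency lockutils wrapper. A minimal sketch of the same pattern, assuming a decorator-guarded resource-tracker path; the function body is illustrative, not Nova's code:

from oslo_concurrency import lockutils


# Callers serialize on the named internal lock; the lockutils 'inner'
# wrapper logs the acquire/wait/hold timings seen in this trace.
@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # illustrative body: release the resources tracked for the failed build
    print('aborting claim for %s' % instance_uuid)


abort_instance_claim('dab76349-85ba-4513-afa7-d9a33da1b1fe')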
[ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Traceback (most recent call last): [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self.driver.spawn(context, instance, image_meta, [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] vm_ref = self.build_virtual_machine(instance, [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] vif_infos = vmwarevif.get_vif_info(self._session, [ 790.587946] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] for vif in network_info: [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] return self._sync_wrapper(fn, *args, **kwargs) [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self.wait() [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self[:] = self._gt.wait() [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] return self._exit_event.wait() [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] current.throw(*self._exc) [ 790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
790.588224] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] result = function(*args, **kwargs) [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] return func(*args, **kwargs) [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] raise e [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] nwinfo = self.network_api.allocate_for_instance( [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] created_port_ids = self._update_ports_for_instance( [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] with excutils.save_and_reraise_exception(): [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] self.force_reraise() [ 790.588511] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] raise self.value [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] updated_port = self._update_port( [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] _ensure_no_port_binding_failure(port) [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] raise exception.PortBindingFailed(port_id=port['id']) [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] nova.exception.PortBindingFailed: Binding failed for 
port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. [ 790.588828] env[61972]: ERROR nova.compute.manager [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] [ 790.588828] env[61972]: DEBUG nova.compute.utils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Binding failed for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 790.589889] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.939s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.591021] env[61972]: INFO nova.compute.claims [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.594538] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Build of instance bc10dded-e669-4fdb-9f5b-cc6abc3a37c7 was re-scheduled: Binding failed for port e89e6e71-0bc6-4f0f-94f3-fad27da19bda, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 790.595131] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 790.595214] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Acquiring lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.595359] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Acquired lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.595520] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 790.600631] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf0ab2d-75a2-4248-9717-32aa6c518fde {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.624202] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b986f147-a782-467c-92d1-bffb6a50c450 could not be found. [ 790.624297] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 790.624555] env[61972]: INFO nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Took 0.04 seconds to destroy the instance on the hypervisor. [ 790.624734] env[61972]: DEBUG oslo.service.loopingcall [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.624967] env[61972]: DEBUG nova.compute.manager [-] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 790.625172] env[61972]: DEBUG nova.network.neutron [-] [instance: b986f147-a782-467c-92d1-bffb6a50c450] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.647076] env[61972]: DEBUG nova.network.neutron [-] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.709223] env[61972]: ERROR nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. [ 790.709223] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 790.709223] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 790.709223] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 790.709223] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 790.709223] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 790.709223] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 790.709223] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 790.709223] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.709223] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 790.709223] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.709223] env[61972]: ERROR nova.compute.manager raise self.value [ 790.709223] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 790.709223] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 790.709223] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 790.709223] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 790.709998] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 790.709998] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 790.709998] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. 
[ 790.709998] env[61972]: ERROR nova.compute.manager [ 790.709998] env[61972]: Traceback (most recent call last): [ 790.709998] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 790.709998] env[61972]: listener.cb(fileno) [ 790.709998] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 790.709998] env[61972]: result = function(*args, **kwargs) [ 790.709998] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 790.709998] env[61972]: return func(*args, **kwargs) [ 790.709998] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 790.709998] env[61972]: raise e [ 790.709998] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 790.709998] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 790.709998] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 790.709998] env[61972]: created_port_ids = self._update_ports_for_instance( [ 790.709998] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 790.709998] env[61972]: with excutils.save_and_reraise_exception(): [ 790.709998] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.709998] env[61972]: self.force_reraise() [ 790.709998] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.709998] env[61972]: raise self.value [ 790.709998] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 790.709998] env[61972]: updated_port = self._update_port( [ 790.709998] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 790.709998] env[61972]: _ensure_no_port_binding_failure(port) [ 790.709998] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 790.709998] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 790.711324] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. [ 790.711324] env[61972]: Removing descriptor: 19 [ 790.712770] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 790.738413] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 790.738646] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 790.738798] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 790.739062] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 790.739219] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.739365] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 790.739573] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 790.739724] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 790.739885] env[61972]: DEBUG 
nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 790.740052] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 790.740225] env[61972]: DEBUG nova.virt.hardware [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 790.741224] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a2ccd4-f28a-4e96-ba26-07d821d4eb4e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.749257] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7d83c8-5b88-413c-8b33-4c82129bf8e5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.765056] env[61972]: ERROR nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. 
[ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Traceback (most recent call last): [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] yield resources [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self.driver.spawn(context, instance, image_meta, [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] vm_ref = self.build_virtual_machine(instance, [ 790.765056] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] vif_infos = vmwarevif.get_vif_info(self._session, [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] for vif in network_info: [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] return self._sync_wrapper(fn, *args, **kwargs) [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self.wait() [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self[:] = self._gt.wait() [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] return self._exit_event.wait() [ 790.765426] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 790.765426] env[61972]: ERROR 
nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] current.throw(*self._exc) [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] result = function(*args, **kwargs) [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] return func(*args, **kwargs) [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] raise e [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] nwinfo = self.network_api.allocate_for_instance( [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] created_port_ids = self._update_ports_for_instance( [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] with excutils.save_and_reraise_exception(): [ 790.765818] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self.force_reraise() [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] raise self.value [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] updated_port = self._update_port( [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] _ensure_no_port_binding_failure(port) [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] raise exception.PortBindingFailed(port_id=port['id']) [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] nova.exception.PortBindingFailed: Binding failed for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. [ 790.766188] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] [ 790.766188] env[61972]: INFO nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Terminating instance [ 791.125386] env[61972]: DEBUG nova.network.neutron [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.135700] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.150766] env[61972]: DEBUG nova.network.neutron [-] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.270438] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Acquiring lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.270641] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Acquired lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.271025] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.313900] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.321866] env[61972]: DEBUG nova.network.neutron [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Updating instance_info_cache with network_info: [] {{(pid=61972) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.653011] env[61972]: INFO nova.compute.manager [-] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Took 1.03 seconds to deallocate network for instance. [ 791.655413] env[61972]: DEBUG nova.compute.claims [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 791.655586] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.791059] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.816669] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Releasing lock "refresh_cache-bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.816945] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 791.817186] env[61972]: DEBUG nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 791.817381] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 791.823741] env[61972]: DEBUG oslo_concurrency.lockutils [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] Releasing lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.823967] env[61972]: DEBUG nova.compute.manager [req-e622ebbf-3bc8-4570-afc0-7eccc4caf312 req-b9ca8c93-f8de-471f-b954-b7dc1593fa58 service nova] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Received event network-vif-deleted-8f874ab6-1327-43ae-b5c7-23a5820b18c1 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 791.834295] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.890815] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.915914] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9a4fbd-254b-48aa-99b3-ed31739c0fac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.923803] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf8a8b2-d1bd-4b4c-8a4c-a424ba2d0282 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.971466] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf8e510-0337-4a31-b75d-3acef6228a88 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.979167] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b2f353-f1b9-4a89-a9bd-84836d7e25b4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.993688] env[61972]: DEBUG nova.compute.provider_tree [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.062503] env[61972]: DEBUG nova.compute.manager [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Received event network-changed-3f5337a6-a41f-4b7e-b0fb-69b42d57c945 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 792.062503] env[61972]: DEBUG nova.compute.manager [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Refreshing instance network info cache due to event network-changed-3f5337a6-a41f-4b7e-b0fb-69b42d57c945. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 792.062503] env[61972]: DEBUG oslo_concurrency.lockutils [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] Acquiring lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.336749] env[61972]: DEBUG nova.network.neutron [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.395479] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Releasing lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.395751] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 792.395946] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 792.396276] env[61972]: DEBUG oslo_concurrency.lockutils [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] Acquired lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.396619] env[61972]: DEBUG nova.network.neutron [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Refreshing network info cache for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 792.398141] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02dad6ad-7f0e-4651-a2bb-ff0ffb7417cb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.407926] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3392f3-c88d-47df-8292-61e79040ecdc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.430543] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 21c83740-56b6-4cc8-b97b-2b7a00380b91 could not be found. 
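The spawn-time tracebacks above (for b986f147, bc10dded and 21c83740) all follow the same shape: network allocation runs in a separate greenthread, and the PortBindingFailed only surfaces when get_vif_info first iterates the network info and the wrapper waits on that greenthread. A stripped-down sketch of that flow, assuming a stand-in allocate function; the real wrapper lives in nova/network/model.py:

import eventlet


class PortBindingFailed(Exception):
    pass


def allocate_for_instance(port_id):
    # stand-in for the Neutron allocation that failed in this run
    raise PortBindingFailed('Binding failed for port %s' % port_id)


class NetworkInfoAsync:
    # Cut-down analogue of the async wrapper: it looks like network_info,
    # but iterating it first waits on the allocation greenthread, which
    # re-raises whatever that greenthread raised.
    def __init__(self, gt):
        self._gt = gt

    def __iter__(self):
        return iter(self._gt.wait())


network_info = NetworkInfoAsync(
    eventlet.spawn(allocate_for_instance,
                   '3f5337a6-a41f-4b7e-b0fb-69b42d57c945'))

for vif in network_info:   # the point where driver.spawn fails above
    print(vif)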
[ 792.431470] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.431687] env[61972]: INFO nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Took 0.04 seconds to destroy the instance on the hypervisor. [ 792.431926] env[61972]: DEBUG oslo.service.loopingcall [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.432426] env[61972]: DEBUG nova.compute.manager [-] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 792.432542] env[61972]: DEBUG nova.network.neutron [-] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.444857] env[61972]: DEBUG nova.network.neutron [-] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.497906] env[61972]: DEBUG nova.scheduler.client.report [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 792.840381] env[61972]: INFO nova.compute.manager [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] [instance: bc10dded-e669-4fdb-9f5b-cc6abc3a37c7] Took 1.02 seconds to deallocate network for instance. [ 792.912440] env[61972]: DEBUG nova.network.neutron [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.948145] env[61972]: DEBUG nova.network.neutron [-] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.987035] env[61972]: DEBUG nova.network.neutron [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.003090] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.003587] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 793.005993] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.599s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.007389] env[61972]: INFO nova.compute.claims [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.451531] env[61972]: INFO nova.compute.manager [-] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Took 1.02 seconds to deallocate network for instance. 
[ 793.453681] env[61972]: DEBUG nova.compute.claims [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 793.453681] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.489610] env[61972]: DEBUG oslo_concurrency.lockutils [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] Releasing lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.489870] env[61972]: DEBUG nova.compute.manager [req-9d87ee3a-67e5-4d0f-b3e2-244e7fb82255 req-b57628ec-7225-458e-bbf7-f671669ed7ce service nova] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Received event network-vif-deleted-3f5337a6-a41f-4b7e-b0fb-69b42d57c945 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 793.512418] env[61972]: DEBUG nova.compute.utils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 793.517035] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 793.517142] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.566965] env[61972]: DEBUG nova.policy [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c4d2671dab9481fb94161b1188de693', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6c59f1b269040d5a4605fa7a178cdf8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 793.854113] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Successfully created port: 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 793.878144] env[61972]: INFO nova.scheduler.client.report [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Deleted allocations for instance bc10dded-e669-4fdb-9f5b-cc6abc3a37c7 [ 794.017733] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 794.386606] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9cde05-8563-4636-a9be-3e6b094c2e36 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.389492] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2088bfa7-c003-428c-ae92-90f9f3c3c96b tempest-ServersTestBootFromVolume-2044502181 tempest-ServersTestBootFromVolume-2044502181-project-member] Lock "bc10dded-e669-4fdb-9f5b-cc6abc3a37c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.836s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.397237] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9927129b-0e1a-494b-8118-49a8b74b239d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.430322] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728d9f84-a4a2-4666-be0a-866b07e0e006 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.438473] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641f8348-4cc7-4409-bf7f-7a250325f5bd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.453124] env[61972]: DEBUG nova.compute.provider_tree [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.895324] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 794.958933] env[61972]: DEBUG nova.scheduler.client.report [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 794.963836] env[61972]: DEBUG nova.compute.manager [req-5520b98f-c8b8-4e91-bb03-7dd578ee197a req-31748947-c9db-465d-b473-c6aa03659e35 service nova] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Received event network-changed-9ebe655a-cf1b-4918-b8ba-6bf0bf3def46 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 794.964039] env[61972]: DEBUG nova.compute.manager [req-5520b98f-c8b8-4e91-bb03-7dd578ee197a req-31748947-c9db-465d-b473-c6aa03659e35 service nova] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Refreshing instance network info cache due to event network-changed-9ebe655a-cf1b-4918-b8ba-6bf0bf3def46. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 794.964304] env[61972]: DEBUG oslo_concurrency.lockutils [req-5520b98f-c8b8-4e91-bb03-7dd578ee197a req-31748947-c9db-465d-b473-c6aa03659e35 service nova] Acquiring lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.964395] env[61972]: DEBUG oslo_concurrency.lockutils [req-5520b98f-c8b8-4e91-bb03-7dd578ee197a req-31748947-c9db-465d-b473-c6aa03659e35 service nova] Acquired lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.964537] env[61972]: DEBUG nova.network.neutron [req-5520b98f-c8b8-4e91-bb03-7dd578ee197a req-31748947-c9db-465d-b473-c6aa03659e35 service nova] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Refreshing network info cache for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.032094] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 795.062803] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 795.063058] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 795.063219] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.063405] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 795.063545] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 795.063714] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 795.063981] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 795.064160] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
795.064327] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 795.064486] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 795.064656] env[61972]: DEBUG nova.virt.hardware [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 795.065555] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4554337-7fb8-4f81-8e65-bac89bc7b574 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.073543] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee73c14-3165-49af-a5ee-1f8f19d7babe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.150687] env[61972]: ERROR nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. 
[ 795.150687] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 795.150687] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 795.150687] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 795.150687] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 795.150687] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 795.150687] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 795.150687] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 795.150687] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 795.150687] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 795.150687] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 795.150687] env[61972]: ERROR nova.compute.manager raise self.value [ 795.150687] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 795.150687] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 795.150687] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 795.150687] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 795.151154] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 795.151154] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 795.151154] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. 
[ 795.151154] env[61972]: ERROR nova.compute.manager [ 795.151154] env[61972]: Traceback (most recent call last): [ 795.151154] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 795.151154] env[61972]: listener.cb(fileno) [ 795.151154] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 795.151154] env[61972]: result = function(*args, **kwargs) [ 795.151154] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 795.151154] env[61972]: return func(*args, **kwargs) [ 795.151154] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 795.151154] env[61972]: raise e [ 795.151154] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 795.151154] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 795.151154] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 795.151154] env[61972]: created_port_ids = self._update_ports_for_instance( [ 795.151154] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 795.151154] env[61972]: with excutils.save_and_reraise_exception(): [ 795.151154] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 795.151154] env[61972]: self.force_reraise() [ 795.151154] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 795.151154] env[61972]: raise self.value [ 795.151154] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 795.151154] env[61972]: updated_port = self._update_port( [ 795.151154] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 795.151154] env[61972]: _ensure_no_port_binding_failure(port) [ 795.151154] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 795.151154] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 795.151911] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. [ 795.151911] env[61972]: Removing descriptor: 19 [ 795.151911] env[61972]: ERROR nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. 
[ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Traceback (most recent call last): [ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] yield resources [ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self.driver.spawn(context, instance, image_meta, [ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self._vmops.spawn(context, instance, image_meta, injected_files, [ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 795.151911] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] vm_ref = self.build_virtual_machine(instance, [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] vif_infos = vmwarevif.get_vif_info(self._session, [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] for vif in network_info: [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] return self._sync_wrapper(fn, *args, **kwargs) [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self.wait() [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self[:] = self._gt.wait() [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] return self._exit_event.wait() [ 795.152326] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 795.152639] env[61972]: ERROR 
nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] result = hub.switch() [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] return self.greenlet.switch() [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] result = function(*args, **kwargs) [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] return func(*args, **kwargs) [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] raise e [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] nwinfo = self.network_api.allocate_for_instance( [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 795.152639] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] created_port_ids = self._update_ports_for_instance( [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] with excutils.save_and_reraise_exception(): [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self.force_reraise() [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] raise self.value [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] updated_port = self._update_port( [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 795.152962] 
env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] _ensure_no_port_binding_failure(port) [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 795.152962] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] raise exception.PortBindingFailed(port_id=port['id']) [ 795.153295] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] nova.exception.PortBindingFailed: Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. [ 795.153295] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] [ 795.153295] env[61972]: INFO nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Terminating instance [ 795.419239] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.467041] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.467612] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 795.474028] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.265s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.474616] env[61972]: INFO nova.compute.claims [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.491823] env[61972]: DEBUG nova.network.neutron [req-5520b98f-c8b8-4e91-bb03-7dd578ee197a req-31748947-c9db-465d-b473-c6aa03659e35 service nova] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.605114] env[61972]: DEBUG nova.network.neutron [req-5520b98f-c8b8-4e91-bb03-7dd578ee197a req-31748947-c9db-465d-b473-c6aa03659e35 service nova] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.656366] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.972941] env[61972]: DEBUG nova.compute.utils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 795.974502] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 795.974678] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.016317] env[61972]: DEBUG nova.policy [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c4d2671dab9481fb94161b1188de693', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6c59f1b269040d5a4605fa7a178cdf8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 796.107816] env[61972]: DEBUG oslo_concurrency.lockutils [req-5520b98f-c8b8-4e91-bb03-7dd578ee197a req-31748947-c9db-465d-b473-c6aa03659e35 service nova] Releasing lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.108280] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquired lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.108468] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 
479b311e-e027-4724-bd8b-dffa8903b538] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.359112] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Successfully created port: 1cb467c5-dc03-4a5b-b564-db26df5763a3 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.479393] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 796.641821] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.818862] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.840783] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7017957-9be0-4ef8-99d3-74dfb1981f6b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.848996] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f387142c-942b-41ed-a7ce-a5c3933e904a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.882238] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b43b74-2b7d-41ca-897c-b16edb8aba83 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.889780] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cabee4-531d-4077-bf38-8ef8980a1829 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.903094] env[61972]: DEBUG nova.compute.provider_tree [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.985104] env[61972]: DEBUG nova.compute.manager [req-7ea06b60-8ea5-4e20-bb3f-8a283071e8ec req-6a313dde-e646-4612-af7b-8e09d40e41c1 service nova] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Received event network-vif-deleted-9ebe655a-cf1b-4918-b8ba-6bf0bf3def46 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 
797.321236] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Releasing lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.321672] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 797.321865] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 797.322192] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfc89da4-1488-43c3-a3ad-2faa5c441758 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.333682] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c98fca-c614-4e9c-82ad-d94b53bdb981 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.355943] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 479b311e-e027-4724-bd8b-dffa8903b538 could not be found. [ 797.356394] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.356588] env[61972]: INFO nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Took 0.03 seconds to destroy the instance on the hypervisor. [ 797.356968] env[61972]: DEBUG oslo.service.loopingcall [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.357063] env[61972]: DEBUG nova.compute.manager [-] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 797.357155] env[61972]: DEBUG nova.network.neutron [-] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.392627] env[61972]: DEBUG nova.network.neutron [-] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.405868] env[61972]: DEBUG nova.scheduler.client.report [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 797.492554] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 797.527147] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 797.527341] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 797.527532] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.527751] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 797.527934] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.528134] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 797.528380] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 797.528575] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
797.528774] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 797.528971] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 797.529202] env[61972]: DEBUG nova.virt.hardware [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 797.530080] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a80b2c-40cd-4064-8097-ab7980754aa8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.538202] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e0b579-e6f8-4530-85f1-938154bbbcd7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.618268] env[61972]: ERROR nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. 
[ 797.618268] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 797.618268] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 797.618268] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 797.618268] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.618268] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 797.618268] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.618268] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 797.618268] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.618268] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 797.618268] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.618268] env[61972]: ERROR nova.compute.manager raise self.value [ 797.618268] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.618268] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 797.618268] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.618268] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 797.618743] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 797.618743] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 797.618743] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. 
[ 797.618743] env[61972]: ERROR nova.compute.manager [ 797.618743] env[61972]: Traceback (most recent call last): [ 797.618743] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 797.618743] env[61972]: listener.cb(fileno) [ 797.618743] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 797.618743] env[61972]: result = function(*args, **kwargs) [ 797.618743] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 797.618743] env[61972]: return func(*args, **kwargs) [ 797.618743] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 797.618743] env[61972]: raise e [ 797.618743] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 797.618743] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 797.618743] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.618743] env[61972]: created_port_ids = self._update_ports_for_instance( [ 797.618743] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.618743] env[61972]: with excutils.save_and_reraise_exception(): [ 797.618743] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.618743] env[61972]: self.force_reraise() [ 797.618743] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.618743] env[61972]: raise self.value [ 797.618743] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.618743] env[61972]: updated_port = self._update_port( [ 797.618743] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.618743] env[61972]: _ensure_no_port_binding_failure(port) [ 797.618743] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 797.618743] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 797.619556] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. [ 797.619556] env[61972]: Removing descriptor: 19 [ 797.619556] env[61972]: ERROR nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. 
[ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Traceback (most recent call last): [ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] yield resources [ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self.driver.spawn(context, instance, image_meta, [ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 797.619556] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] vm_ref = self.build_virtual_machine(instance, [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] vif_infos = vmwarevif.get_vif_info(self._session, [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] for vif in network_info: [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] return self._sync_wrapper(fn, *args, **kwargs) [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self.wait() [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self[:] = self._gt.wait() [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] return self._exit_event.wait() [ 797.619979] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 797.620637] env[61972]: ERROR 
nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] result = hub.switch() [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] return self.greenlet.switch() [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] result = function(*args, **kwargs) [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] return func(*args, **kwargs) [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] raise e [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] nwinfo = self.network_api.allocate_for_instance( [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.620637] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] created_port_ids = self._update_ports_for_instance( [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] with excutils.save_and_reraise_exception(): [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self.force_reraise() [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] raise self.value [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] updated_port = self._update_port( [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.621096] 
env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] _ensure_no_port_binding_failure(port) [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 797.621096] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] raise exception.PortBindingFailed(port_id=port['id']) [ 797.621430] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] nova.exception.PortBindingFailed: Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. [ 797.621430] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] [ 797.621430] env[61972]: INFO nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Terminating instance [ 797.895519] env[61972]: DEBUG nova.network.neutron [-] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.912055] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.912611] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 797.915074] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.016s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.123789] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.124067] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquired lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.124201] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 798.401536] env[61972]: INFO nova.compute.manager [-] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Took 1.04 seconds to deallocate network for instance. [ 798.408028] env[61972]: DEBUG nova.compute.claims [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 798.408171] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.419564] env[61972]: DEBUG nova.compute.utils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.420926] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 798.421112] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 798.505952] env[61972]: DEBUG nova.policy [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bdbb74770364b4a8700aa7aa967b584', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b445e0d9bbfa4a528bc49fff621c484c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 798.663858] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 798.810279] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e4cd26-1a3c-44a7-a442-7883d56cba68 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.818132] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54690a0e-ac73-4229-bfbd-8f0cc9868420 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.851219] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fbbee6a-a594-41e8-a8a5-9b9a5ebb3de0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.858444] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.860648] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7921a720-6405-4f04-9336-3244c40525c7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.874556] env[61972]: DEBUG nova.compute.provider_tree [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.926788] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 
tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 798.930362] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Successfully created port: 8cabbfc8-9c80-40c9-b148-797be58447c5 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.011085] env[61972]: DEBUG nova.compute.manager [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Received event network-changed-1cb467c5-dc03-4a5b-b564-db26df5763a3 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 799.011284] env[61972]: DEBUG nova.compute.manager [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Refreshing instance network info cache due to event network-changed-1cb467c5-dc03-4a5b-b564-db26df5763a3. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 799.011476] env[61972]: DEBUG oslo_concurrency.lockutils [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] Acquiring lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.198705] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Successfully created port: 10079de4-f81f-4577-8d45-ea1750322e95 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.365705] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Releasing lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.366173] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 799.366368] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 799.366695] env[61972]: DEBUG oslo_concurrency.lockutils [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] Acquired lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.366869] env[61972]: DEBUG nova.network.neutron [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Refreshing network info cache for port 1cb467c5-dc03-4a5b-b564-db26df5763a3 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 799.367955] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1afd7bd6-32c6-4ac3-a818-ced1513130ff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.378432] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3a1e8c-8511-4e88-b88d-aa84c1ab373e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.389035] env[61972]: DEBUG nova.scheduler.client.report [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 799.404473] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 47bd9677-375a-413b-a5c5-989d491adec9 could not be found. [ 799.404689] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 799.404870] env[61972]: INFO nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 799.405122] env[61972]: DEBUG oslo.service.loopingcall [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 799.405840] env[61972]: DEBUG nova.compute.manager [-] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 799.405946] env[61972]: DEBUG nova.network.neutron [-] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 799.423734] env[61972]: DEBUG nova.network.neutron [-] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.893637] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.978s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.894723] env[61972]: ERROR nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. 
[ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Traceback (most recent call last): [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self.driver.spawn(context, instance, image_meta, [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] vm_ref = self.build_virtual_machine(instance, [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] vif_infos = vmwarevif.get_vif_info(self._session, [ 799.894723] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] for vif in network_info: [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] return self._sync_wrapper(fn, *args, **kwargs) [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self.wait() [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self[:] = self._gt.wait() [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] return self._exit_event.wait() [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] result = hub.switch() [ 799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
799.895057] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] return self.greenlet.switch() [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] result = function(*args, **kwargs) [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] return func(*args, **kwargs) [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] raise e [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] nwinfo = self.network_api.allocate_for_instance( [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] created_port_ids = self._update_ports_for_instance( [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] with excutils.save_and_reraise_exception(): [ 799.895347] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] self.force_reraise() [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] raise self.value [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] updated_port = self._update_port( [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] _ensure_no_port_binding_failure(port) [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] raise exception.PortBindingFailed(port_id=port['id']) [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] nova.exception.PortBindingFailed: Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. [ 799.895670] env[61972]: ERROR nova.compute.manager [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] [ 799.895919] env[61972]: DEBUG nova.compute.utils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 799.896607] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.662s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.899466] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Build of instance 6cda8874-6af5-490a-b9a2-323992265eb4 was re-scheduled: Binding failed for port 05eb5f0f-024b-4404-81a8-f5f8246a82ff, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 799.899903] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 799.900625] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquiring lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.900625] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Acquired lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.900777] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.902145] env[61972]: DEBUG nova.network.neutron [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.926113] env[61972]: DEBUG nova.network.neutron [-] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.936467] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 799.968865] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.969160] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.969336] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.969521] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.969665] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.969807] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.970019] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.970564] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 799.970770] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 
tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.970942] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.971137] env[61972]: DEBUG nova.virt.hardware [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.972011] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18209d7-b5ea-4c22-bcd2-291048d74779 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.982952] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb968bf-da44-420a-9aaa-47c0fd547a6c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.055565] env[61972]: DEBUG nova.network.neutron [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.385441] env[61972]: ERROR nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. 
[ 800.385441] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 800.385441] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 800.385441] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 800.385441] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 800.385441] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 800.385441] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 800.385441] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 800.385441] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.385441] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 800.385441] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.385441] env[61972]: ERROR nova.compute.manager raise self.value [ 800.385441] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 800.385441] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 800.385441] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.385441] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 800.386106] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 800.386106] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 800.386106] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. 
[ 800.386106] env[61972]: ERROR nova.compute.manager [ 800.386106] env[61972]: Traceback (most recent call last): [ 800.386106] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 800.386106] env[61972]: listener.cb(fileno) [ 800.386106] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 800.386106] env[61972]: result = function(*args, **kwargs) [ 800.386106] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 800.386106] env[61972]: return func(*args, **kwargs) [ 800.386106] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 800.386106] env[61972]: raise e [ 800.386106] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 800.386106] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 800.386106] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 800.386106] env[61972]: created_port_ids = self._update_ports_for_instance( [ 800.386106] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 800.386106] env[61972]: with excutils.save_and_reraise_exception(): [ 800.386106] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.386106] env[61972]: self.force_reraise() [ 800.386106] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.386106] env[61972]: raise self.value [ 800.386106] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 800.386106] env[61972]: updated_port = self._update_port( [ 800.386106] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.386106] env[61972]: _ensure_no_port_binding_failure(port) [ 800.386106] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 800.386106] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 800.386903] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. [ 800.386903] env[61972]: Removing descriptor: 19 [ 800.386903] env[61972]: ERROR nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. 
[ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] Traceback (most recent call last): [ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] yield resources [ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self.driver.spawn(context, instance, image_meta, [ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self._vmops.spawn(context, instance, image_meta, injected_files, [ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 800.386903] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] vm_ref = self.build_virtual_machine(instance, [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] vif_infos = vmwarevif.get_vif_info(self._session, [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] for vif in network_info: [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] return self._sync_wrapper(fn, *args, **kwargs) [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self.wait() [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self[:] = self._gt.wait() [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] return self._exit_event.wait() [ 800.387225] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 800.387579] env[61972]: ERROR 
nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] result = hub.switch() [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] return self.greenlet.switch() [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] result = function(*args, **kwargs) [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] return func(*args, **kwargs) [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] raise e [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] nwinfo = self.network_api.allocate_for_instance( [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 800.387579] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] created_port_ids = self._update_ports_for_instance( [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] with excutils.save_and_reraise_exception(): [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self.force_reraise() [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] raise self.value [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] updated_port = self._update_port( [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.387894] 
env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] _ensure_no_port_binding_failure(port) [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 800.387894] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] raise exception.PortBindingFailed(port_id=port['id']) [ 800.388229] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] nova.exception.PortBindingFailed: Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. [ 800.388229] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] [ 800.388229] env[61972]: INFO nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Terminating instance [ 800.427445] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.429408] env[61972]: INFO nova.compute.manager [-] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Took 1.02 seconds to deallocate network for instance. [ 800.433881] env[61972]: DEBUG nova.compute.claims [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 800.434079] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.519195] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.560016] env[61972]: DEBUG oslo_concurrency.lockutils [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] Releasing lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.560328] env[61972]: DEBUG nova.compute.manager [req-f9acf121-98f7-46d4-9440-08b8407eb159 req-9abbce33-cee1-4c9a-85ef-c3839d8cf7c6 service nova] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Received event network-vif-deleted-1cb467c5-dc03-4a5b-b564-db26df5763a3 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 800.689528] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dd760d-3965-4b5a-89db-c24e63c57881 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.697484] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4250babb-fe5c-43f3-8bd3-5dc2d1e2502d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.726982] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b59c95c-60fb-4888-a5ce-00bcbdfe1856 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.734665] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964b2544-647a-4d04-817e-eeec2a639c3a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.749153] env[61972]: DEBUG nova.compute.provider_tree [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.891160] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.891345] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquired lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.891521] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.021570] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Releasing lock "refresh_cache-6cda8874-6af5-490a-b9a2-323992265eb4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.021710] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 801.021887] env[61972]: DEBUG nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 801.022067] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.043263] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.123274] env[61972]: DEBUG nova.compute.manager [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] [instance: a5a78743-e155-4ded-854e-822976192097] Received event network-changed-8cabbfc8-9c80-40c9-b148-797be58447c5 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 801.123413] env[61972]: DEBUG nova.compute.manager [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] [instance: a5a78743-e155-4ded-854e-822976192097] Refreshing instance network info cache due to event network-changed-8cabbfc8-9c80-40c9-b148-797be58447c5. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 801.123569] env[61972]: DEBUG oslo_concurrency.lockutils [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] Acquiring lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.252294] env[61972]: DEBUG nova.scheduler.client.report [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 801.420842] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.502843] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.545288] env[61972]: DEBUG nova.network.neutron [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.757201] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.861s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.757863] env[61972]: ERROR nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Traceback (most recent call last): [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self.driver.spawn(context, instance, image_meta, [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] vm_ref = self.build_virtual_machine(instance, [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] vif_infos = vmwarevif.get_vif_info(self._session, [ 801.757863] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] for vif in network_info: [ 801.758173] env[61972]: ERROR 
nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] return self._sync_wrapper(fn, *args, **kwargs) [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self.wait() [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self[:] = self._gt.wait() [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] return self._exit_event.wait() [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] result = hub.switch() [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 801.758173] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] return self.greenlet.switch() [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] result = function(*args, **kwargs) [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] return func(*args, **kwargs) [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] raise e [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] nwinfo = self.network_api.allocate_for_instance( [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] created_port_ids = self._update_ports_for_instance( [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 
5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] with excutils.save_and_reraise_exception(): [ 801.758471] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] self.force_reraise() [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] raise self.value [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] updated_port = self._update_port( [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] _ensure_no_port_binding_failure(port) [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] raise exception.PortBindingFailed(port_id=port['id']) [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] nova.exception.PortBindingFailed: Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. [ 801.758760] env[61972]: ERROR nova.compute.manager [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] [ 801.759128] env[61972]: DEBUG nova.compute.utils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. 
{{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 801.759823] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 25.256s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.760015] env[61972]: DEBUG nova.objects.instance [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61972) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 801.762875] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Build of instance 5b7223bd-66f3-44ec-b3bc-e9072eca515e was re-scheduled: Binding failed for port 738272d9-ef01-458b-b987-d513d12f7c81, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 801.763317] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 801.763550] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.763691] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.763848] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.005584] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Releasing lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.006027] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: 
a5a78743-e155-4ded-854e-822976192097] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 802.006238] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.006556] env[61972]: DEBUG oslo_concurrency.lockutils [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] Acquired lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.006725] env[61972]: DEBUG nova.network.neutron [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] [instance: a5a78743-e155-4ded-854e-822976192097] Refreshing network info cache for port 8cabbfc8-9c80-40c9-b148-797be58447c5 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 802.008081] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40ea9eb0-0dfd-4688-bab3-da6281049d55 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.017360] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933945f0-859c-412f-b86b-5f99e205004f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.039370] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a5a78743-e155-4ded-854e-822976192097 could not be found. [ 802.039592] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 802.039767] env[61972]: INFO nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Took 0.03 seconds to destroy the instance on the hypervisor. [ 802.039999] env[61972]: DEBUG oslo.service.loopingcall [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.040227] env[61972]: DEBUG nova.compute.manager [-] [instance: a5a78743-e155-4ded-854e-822976192097] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 802.040321] env[61972]: DEBUG nova.network.neutron [-] [instance: a5a78743-e155-4ded-854e-822976192097] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 802.047429] env[61972]: INFO nova.compute.manager [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] [instance: 6cda8874-6af5-490a-b9a2-323992265eb4] Took 1.03 seconds to deallocate network for instance. [ 802.090240] env[61972]: DEBUG nova.network.neutron [-] [instance: a5a78743-e155-4ded-854e-822976192097] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.297160] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.441050] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.526517] env[61972]: DEBUG nova.network.neutron [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] [instance: a5a78743-e155-4ded-854e-822976192097] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.621010] env[61972]: DEBUG nova.network.neutron [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] [instance: a5a78743-e155-4ded-854e-822976192097] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.773708] env[61972]: DEBUG oslo_concurrency.lockutils [None req-612fb57f-61f7-487f-820d-c81fcc2a7bee tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.774801] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.308s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.775042] env[61972]: DEBUG nova.objects.instance [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lazy-loading 'resources' on Instance uuid 036a2dfc-615d-410a-8a3f-32de621879c2 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.917461] env[61972]: DEBUG nova.network.neutron [-] [instance: a5a78743-e155-4ded-854e-822976192097] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.943326] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-5b7223bd-66f3-44ec-b3bc-e9072eca515e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.943468] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 802.943605] env[61972]: DEBUG nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 802.943772] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 802.959957] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.083308] env[61972]: INFO nova.scheduler.client.report [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Deleted allocations for instance 6cda8874-6af5-490a-b9a2-323992265eb4 [ 803.123336] env[61972]: DEBUG oslo_concurrency.lockutils [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] Releasing lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.123701] env[61972]: DEBUG nova.compute.manager [req-91957ed5-c67b-4b1b-acea-5d939d5b71e9 req-86be55e6-0b0f-44c7-8b05-299c44415081 service nova] [instance: a5a78743-e155-4ded-854e-822976192097] Received event network-vif-deleted-8cabbfc8-9c80-40c9-b148-797be58447c5 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 803.421128] env[61972]: INFO nova.compute.manager [-] [instance: a5a78743-e155-4ded-854e-822976192097] Took 1.38 seconds to deallocate network for instance. 
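The ERROR tracebacks above all terminate in nova/network/neutron.py's _ensure_no_port_binding_failure() raising nova.exception.PortBindingFailed. The following is a minimal, self-contained sketch of that check, assuming (as the port-binding workflow implies) that Neutron returns an unbindable port with binding:vif_type set to the sentinel 'binding_failed'; the exception class and helper below are stand-ins for illustration, not Nova's own code.

# Sketch of the check the ERROR tracebacks above keep hitting.
# Assumption: Neutron marks an unbindable port by setting
# binding:vif_type to the sentinel value 'binding_failed'.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reported a failed binding for this port."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: a port Neutron could not bind to any host.
failed_port = {'id': '8cabbfc8-9c80-40c9-b148-797be58447c5',
               'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)  # mirrors the "Binding failed for port ..." ERROR lines above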
[ 803.423413] env[61972]: DEBUG nova.compute.claims [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 803.423583] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.462636] env[61972]: DEBUG nova.network.neutron [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.525043] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb4d116-f92d-429d-9bc8-bc5a2c9263af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.532922] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50ec634-4c21-4b23-8a8f-49d05e1a57ea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.561417] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc748e8c-9150-42d5-b410-82e67ed7078e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.568954] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43852a15-b84e-47cc-ab7d-42e6466b003f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.583166] env[61972]: DEBUG nova.compute.provider_tree [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.590777] env[61972]: DEBUG oslo_concurrency.lockutils [None req-857fbe5e-0f6c-456f-9d55-71184c6b2a43 tempest-SecurityGroupsTestJSON-580693667 tempest-SecurityGroupsTestJSON-580693667-project-member] Lock "6cda8874-6af5-490a-b9a2-323992265eb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 160.915s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.966602] env[61972]: INFO nova.compute.manager [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 5b7223bd-66f3-44ec-b3bc-e9072eca515e] Took 1.02 seconds to deallocate network for instance. 
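The repeated "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" lines come from oslo.concurrency's named in-process locks. A small sketch of the same pattern using the public lockutils.lock() context manager follows; the lock name matches the log, but the sleep and the timing printout are illustrative stand-ins for the real ResourceTracker bookkeeping.

import time

from oslo_concurrency import lockutils


def abort_instance_claim_sketch():
    """Illustrative use of a named in-process lock.

    The real ResourceTracker serializes claim bookkeeping on the
    'compute_resources' lock; this stand-in only sleeps so the
    waited/held timings are visible.
    """
    start = time.monotonic()
    with lockutils.lock('compute_resources'):
        acquired = time.monotonic()
        time.sleep(0.1)  # placeholder for claim bookkeeping
    released = time.monotonic()
    print(f"waited {acquired - start:.3f}s, held {released - acquired:.3f}s")


if __name__ == '__main__':
    abort_instance_claim_sketch()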
[ 804.086052] env[61972]: DEBUG nova.scheduler.client.report [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 804.093555] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 804.590676] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.816s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.593121] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.539s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.595049] env[61972]: INFO nova.compute.claims [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.615905] env[61972]: INFO nova.scheduler.client.report [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Deleted allocations for instance 036a2dfc-615d-410a-8a3f-32de621879c2 [ 804.621082] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.992971] env[61972]: INFO nova.scheduler.client.report [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted allocations for instance 5b7223bd-66f3-44ec-b3bc-e9072eca515e [ 805.125107] env[61972]: DEBUG oslo_concurrency.lockutils [None req-31bf2a86-9f25-4500-a940-bec7b2b381b0 tempest-ServerShowV257Test-1934604961 tempest-ServerShowV257Test-1934604961-project-member] Lock "036a2dfc-615d-410a-8a3f-32de621879c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
:: held 28.133s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.504270] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aa2a6a2e-f0ee-4728-981e-aa1d33ea1852 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "5b7223bd-66f3-44ec-b3bc-e9072eca515e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 160.120s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.879517] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da77c8b7-3ea1-45ba-9867-276c8490f9bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.887275] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3260fa-04f8-497a-b01d-e1f2fa363dcb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.917397] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc13a15-e7f2-4707-890d-e24237ab7699 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.924725] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086fa9fe-9492-41fc-b060-6216b1d63829 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.938657] env[61972]: DEBUG nova.compute.provider_tree [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.006610] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 806.443170] env[61972]: DEBUG nova.scheduler.client.report [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 806.533717] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.765843] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "942b00ba-a615-452d-a0c1-633d48d73fd4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.766114] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "942b00ba-a615-452d-a0c1-633d48d73fd4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.948432] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.948973] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 806.951872] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.698s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.456687] env[61972]: DEBUG nova.compute.utils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 807.461526] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 807.461656] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 807.515691] env[61972]: DEBUG nova.policy [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91f140eb617e4136a765c5c15978643e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6c46447bf904572b78f673fc04de853', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 807.753361] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fce8c9-5c93-4c1e-b735-d5bf66ecfbbf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.761120] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316643d5-8b4f-43bd-abd0-dbd639c70657 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.791920] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cac00f-5ba9-4903-84bb-d9f7811af67a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.799195] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6189a3fe-7e81-4225-9d37-29da1abd69d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.813360] env[61972]: DEBUG nova.compute.provider_tree [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 
tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.850543] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Successfully created port: 63240ad1-b41a-445d-8fa3-266e0b7628a6 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.962371] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 808.314705] env[61972]: DEBUG nova.scheduler.client.report [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 808.819781] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.868s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.821020] env[61972]: ERROR nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. 
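The "Inventory has not changed for provider ... based on inventory data" records keep reporting the same resource-provider inventory. A short worked example of what those fields imply for schedulable capacity follows; the dict is copied from the log, while the helper is illustrative arithmetic (capacity = (total - reserved) * allocation_ratio), not Placement code.

# Inventory as reported in the log for provider
# 2f34b92c-91e8-4983-ae34-7426fcec3157.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1,   'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1,   'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1,   'max_unit': 175,   'step_size': 1},
}


def schedulable(inv: dict) -> float:
    """Capacity the scheduler can hand out: (total - reserved) * allocation_ratio."""
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']


for rc, inv in inventory.items():
    print(rc, schedulable(inv))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0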
[ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Traceback (most recent call last): [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self.driver.spawn(context, instance, image_meta, [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] vm_ref = self.build_virtual_machine(instance, [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 808.821020] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] for vif in network_info: [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] return self._sync_wrapper(fn, *args, **kwargs) [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self.wait() [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self[:] = self._gt.wait() [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] return self._exit_event.wait() [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] result = hub.switch() [ 808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
808.821382] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] return self.greenlet.switch() [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] result = function(*args, **kwargs) [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] return func(*args, **kwargs) [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] raise e [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] nwinfo = self.network_api.allocate_for_instance( [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] created_port_ids = self._update_ports_for_instance( [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] with excutils.save_and_reraise_exception(): [ 808.821776] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] self.force_reraise() [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] raise self.value [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] updated_port = self._update_port( [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] _ensure_no_port_binding_failure(port) [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] raise exception.PortBindingFailed(port_id=port['id']) [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] nova.exception.PortBindingFailed: Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. [ 808.822150] env[61972]: ERROR nova.compute.manager [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] [ 808.822554] env[61972]: DEBUG nova.compute.utils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 808.825593] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.167s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.826088] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Build of instance dab76349-85ba-4513-afa7-d9a33da1b1fe was re-scheduled: Binding failed for port a9245a41-0f57-44b9-a0e5-84924344fcfb, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 808.826607] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 808.829343] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.829343] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquired lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.829343] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 808.950854] env[61972]: DEBUG nova.compute.manager [req-4c52f130-3ccc-425b-8b10-96bd521260c1 req-a8c72531-bcb4-46bc-b497-5c3af627e538 service nova] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Received event network-changed-63240ad1-b41a-445d-8fa3-266e0b7628a6 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 808.950999] env[61972]: DEBUG nova.compute.manager [req-4c52f130-3ccc-425b-8b10-96bd521260c1 req-a8c72531-bcb4-46bc-b497-5c3af627e538 service nova] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Refreshing instance network info cache due to event network-changed-63240ad1-b41a-445d-8fa3-266e0b7628a6. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 808.951229] env[61972]: DEBUG oslo_concurrency.lockutils [req-4c52f130-3ccc-425b-8b10-96bd521260c1 req-a8c72531-bcb4-46bc-b497-5c3af627e538 service nova] Acquiring lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.951368] env[61972]: DEBUG oslo_concurrency.lockutils [req-4c52f130-3ccc-425b-8b10-96bd521260c1 req-a8c72531-bcb4-46bc-b497-5c3af627e538 service nova] Acquired lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.951528] env[61972]: DEBUG nova.network.neutron [req-4c52f130-3ccc-425b-8b10-96bd521260c1 req-a8c72531-bcb4-46bc-b497-5c3af627e538 service nova] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Refreshing network info cache for port 63240ad1-b41a-445d-8fa3-266e0b7628a6 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.971545] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 809.003036] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 809.003588] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 809.003796] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.003990] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 809.004202] env[61972]: DEBUG nova.virt.hardware [None 
req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.004362] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 809.004571] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 809.004727] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 809.004891] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 809.005063] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 809.005237] env[61972]: DEBUG nova.virt.hardware [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 809.006119] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b9778f-da43-4ebf-a640-9e41d7d5bfe1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.014098] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b67539-2ad0-431d-b001-db3fcb40aa04 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.065787] env[61972]: ERROR nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. 
[ 809.065787] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 809.065787] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 809.065787] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 809.065787] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 809.065787] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 809.065787] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 809.065787] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 809.065787] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.065787] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 809.065787] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.065787] env[61972]: ERROR nova.compute.manager raise self.value [ 809.065787] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 809.065787] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 809.065787] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.065787] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 809.066975] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.066975] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 809.066975] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. 
[ 809.066975] env[61972]: ERROR nova.compute.manager [ 809.066975] env[61972]: Traceback (most recent call last): [ 809.066975] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 809.066975] env[61972]: listener.cb(fileno) [ 809.066975] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 809.066975] env[61972]: result = function(*args, **kwargs) [ 809.066975] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 809.066975] env[61972]: return func(*args, **kwargs) [ 809.066975] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 809.066975] env[61972]: raise e [ 809.066975] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 809.066975] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 809.066975] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 809.066975] env[61972]: created_port_ids = self._update_ports_for_instance( [ 809.066975] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 809.066975] env[61972]: with excutils.save_and_reraise_exception(): [ 809.066975] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.066975] env[61972]: self.force_reraise() [ 809.066975] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.066975] env[61972]: raise self.value [ 809.066975] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 809.066975] env[61972]: updated_port = self._update_port( [ 809.066975] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.066975] env[61972]: _ensure_no_port_binding_failure(port) [ 809.066975] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.066975] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 809.068078] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. [ 809.068078] env[61972]: Removing descriptor: 21 [ 809.068078] env[61972]: ERROR nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. 
[ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Traceback (most recent call last): [ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] yield resources [ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self.driver.spawn(context, instance, image_meta, [ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self._vmops.spawn(context, instance, image_meta, injected_files, [ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 809.068078] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] vm_ref = self.build_virtual_machine(instance, [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] vif_infos = vmwarevif.get_vif_info(self._session, [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] for vif in network_info: [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] return self._sync_wrapper(fn, *args, **kwargs) [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self.wait() [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self[:] = self._gt.wait() [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] return self._exit_event.wait() [ 809.068482] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 809.068808] env[61972]: ERROR 
nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] result = hub.switch() [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] return self.greenlet.switch() [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] result = function(*args, **kwargs) [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] return func(*args, **kwargs) [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] raise e [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] nwinfo = self.network_api.allocate_for_instance( [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 809.068808] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] created_port_ids = self._update_ports_for_instance( [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] with excutils.save_and_reraise_exception(): [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self.force_reraise() [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] raise self.value [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] updated_port = self._update_port( [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.069155] 
env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] _ensure_no_port_binding_failure(port) [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.069155] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] raise exception.PortBindingFailed(port_id=port['id']) [ 809.069647] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] nova.exception.PortBindingFailed: Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. [ 809.069647] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] [ 809.069647] env[61972]: INFO nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Terminating instance [ 809.352623] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.481210] env[61972]: DEBUG nova.network.neutron [req-4c52f130-3ccc-425b-8b10-96bd521260c1 req-a8c72531-bcb4-46bc-b497-5c3af627e538 service nova] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.545845] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.571974] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Acquiring lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.631948] env[61972]: DEBUG nova.network.neutron [req-4c52f130-3ccc-425b-8b10-96bd521260c1 req-a8c72531-bcb4-46bc-b497-5c3af627e538 service nova] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.668019] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83fa0e1-b92f-4248-9b70-4546c0e1884e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.674621] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2cc2233-529b-43f8-96fb-c371f897f0a4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.705171] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6ea7b4-c0b9-454e-8a89-4ecc7a013d94 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.712960] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4380a413-4c47-4122-8b7e-4af8013aa22a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.725984] env[61972]: DEBUG nova.compute.provider_tree [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.050115] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Releasing lock "refresh_cache-dab76349-85ba-4513-afa7-d9a33da1b1fe" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.050395] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 810.052616] env[61972]: DEBUG nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 810.052616] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.073822] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.133960] env[61972]: DEBUG oslo_concurrency.lockutils [req-4c52f130-3ccc-425b-8b10-96bd521260c1 req-a8c72531-bcb4-46bc-b497-5c3af627e538 service nova] Releasing lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.134589] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Acquired lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.134842] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 810.233019] env[61972]: DEBUG nova.scheduler.client.report [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 810.366679] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.366679] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.578662] env[61972]: DEBUG nova.network.neutron [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.652835] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.728778] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.735951] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.913s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.736566] env[61972]: ERROR nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] Traceback (most recent call last): [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self.driver.spawn(context, instance, image_meta, [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self._vmops.spawn(context, instance, image_meta, injected_files, [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] vm_ref = self.build_virtual_machine(instance, [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] vif_infos = vmwarevif.get_vif_info(self._session, [ 810.736566] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] for vif in network_info: [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] return self._sync_wrapper(fn, *args, **kwargs) [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: 
b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self.wait() [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self[:] = self._gt.wait() [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] return self._exit_event.wait() [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] result = hub.switch() [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 810.736873] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] return self.greenlet.switch() [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] result = function(*args, **kwargs) [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] return func(*args, **kwargs) [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] raise e [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] nwinfo = self.network_api.allocate_for_instance( [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] created_port_ids = self._update_ports_for_instance( [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] with excutils.save_and_reraise_exception(): [ 810.737216] env[61972]: ERROR nova.compute.manager [instance: 
b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] self.force_reraise() [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] raise self.value [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] updated_port = self._update_port( [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] _ensure_no_port_binding_failure(port) [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] raise exception.PortBindingFailed(port_id=port['id']) [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] nova.exception.PortBindingFailed: Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. [ 810.737582] env[61972]: ERROR nova.compute.manager [instance: b986f147-a782-467c-92d1-bffb6a50c450] [ 810.737924] env[61972]: DEBUG nova.compute.utils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 810.738766] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.285s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.745018] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Build of instance b986f147-a782-467c-92d1-bffb6a50c450 was re-scheduled: Binding failed for port 8f874ab6-1327-43ae-b5c7-23a5820b18c1, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 810.745018] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 810.745018] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquiring lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.745018] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Acquired lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.745246] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.017952] env[61972]: DEBUG nova.compute.manager [req-96b6de31-fb2f-4877-8cf2-63795a9c407e req-4b9c4b6e-4db2-4d24-93bc-5883b502f5a1 service nova] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Received event network-vif-deleted-63240ad1-b41a-445d-8fa3-266e0b7628a6 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 811.081667] env[61972]: INFO nova.compute.manager [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: dab76349-85ba-4513-afa7-d9a33da1b1fe] Took 1.03 seconds to deallocate network for instance. [ 811.231071] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Releasing lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.231468] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 811.231669] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 811.231983] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-990aaab2-e73e-4a66-ad06-ddd29dec9f55 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.241648] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d983a0-c6a9-4af7-aa13-5d5a1b43f45c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.267647] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 67ecabfd-4efc-4e1c-a708-107197cfd018 could not be found. [ 811.268031] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 811.268109] env[61972]: INFO nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Took 0.04 seconds to destroy the instance on the hypervisor. [ 811.268303] env[61972]: DEBUG oslo.service.loopingcall [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.269059] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 811.270664] env[61972]: DEBUG nova.compute.manager [-] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 811.270781] env[61972]: DEBUG nova.network.neutron [-] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 811.293196] env[61972]: DEBUG nova.network.neutron [-] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 811.349886] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.498237] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b0658f-636d-40ee-800a-912fe2bf1620 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.505394] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2692b423-bc89-4c3a-a154-e2a44e25c09b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.534392] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d547ae4-db88-4a43-9273-f6cd1d9ff7ba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.541808] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e95d7db-3bb5-45e9-9e84-f5b4c950b61f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.554873] env[61972]: DEBUG nova.compute.provider_tree [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.795038] env[61972]: DEBUG nova.network.neutron [-] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.852897] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Releasing lock "refresh_cache-b986f147-a782-467c-92d1-bffb6a50c450" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.853165] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 811.853347] env[61972]: DEBUG nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 811.853520] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 811.869041] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.058349] env[61972]: DEBUG nova.scheduler.client.report [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 812.109212] env[61972]: INFO nova.scheduler.client.report [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Deleted allocations for instance dab76349-85ba-4513-afa7-d9a33da1b1fe [ 812.297825] env[61972]: INFO nova.compute.manager [-] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Took 1.03 seconds to deallocate network for instance. 
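Aside on the repeated PortBindingFailed errors in the tracebacks above: every failure path ends in nova/network/neutron.py's _ensure_no_port_binding_failure (line 294 in this log), which raises exception.PortBindingFailed(port_id=port['id']) when Neutron reports that it could not bind the port on this host. The following is a minimal illustrative sketch of that check, reconstructed from the file paths and call quoted in the log rather than copied from the Nova source; the simplified PortBindingFailed class and the VIF_TYPE_BINDING_FAILED constant are assumptions standing in for nova.exception and nova.network.model.

    # Illustrative sketch only, not the verbatim Nova implementation.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        """Raised when Neutron reports it could not bind a port on this host."""

        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # After Nova asks Neutron to create or update a port, Neutron records
        # the binding result in the port's binding:vif_type attribute;
        # 'binding_failed' means no mechanism driver could bind the port, so
        # the instance cannot be wired up and the build is re-scheduled, as
        # seen for the instances in this log.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == '__main__':
        # Port dict shaped like the failed binding seen above (ID reused from
        # the log entries for instance 67ecabfd-4efc-4e1c-a708-107197cfd018).
        failed_port = {'id': '63240ad1-b41a-445d-8fa3-266e0b7628a6',
                       'binding:vif_type': 'binding_failed'}
        try:
            _ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)

When the check raises, _update_ports_for_instance re-raises through oslo_utils' save_and_reraise_exception context manager, which is why each traceback above passes through excutils.py __exit__ / force_reraise before the PortBindingFailed surfaces in the compute manager.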
[ 812.300197] env[61972]: DEBUG nova.compute.claims [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 812.300452] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.373203] env[61972]: DEBUG nova.network.neutron [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.565798] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.827s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.566442] env[61972]: ERROR nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. 
[ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Traceback (most recent call last): [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self.driver.spawn(context, instance, image_meta, [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] vm_ref = self.build_virtual_machine(instance, [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] vif_infos = vmwarevif.get_vif_info(self._session, [ 812.566442] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] for vif in network_info: [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] return self._sync_wrapper(fn, *args, **kwargs) [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self.wait() [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self[:] = self._gt.wait() [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] return self._exit_event.wait() [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] current.throw(*self._exc) [ 812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
812.566798] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] result = function(*args, **kwargs) [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] return func(*args, **kwargs) [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] raise e [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] nwinfo = self.network_api.allocate_for_instance( [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] created_port_ids = self._update_ports_for_instance( [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] with excutils.save_and_reraise_exception(): [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] self.force_reraise() [ 812.567161] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] raise self.value [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] updated_port = self._update_port( [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] _ensure_no_port_binding_failure(port) [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] raise exception.PortBindingFailed(port_id=port['id']) [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] nova.exception.PortBindingFailed: Binding failed for 
port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. [ 812.567528] env[61972]: ERROR nova.compute.manager [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] [ 812.567528] env[61972]: DEBUG nova.compute.utils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Binding failed for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 812.568485] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.149s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.571663] env[61972]: INFO nova.compute.claims [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 812.574294] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Build of instance 21c83740-56b6-4cc8-b97b-2b7a00380b91 was re-scheduled: Binding failed for port 3f5337a6-a41f-4b7e-b0fb-69b42d57c945, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 812.574701] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 812.574982] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Acquiring lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.575183] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Acquired lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.575347] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.616799] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2d9888c2-5aef-4062-81bc-37bfc89bdabb tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "dab76349-85ba-4513-afa7-d9a33da1b1fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 164.346s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.875576] env[61972]: INFO nova.compute.manager [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] [instance: b986f147-a782-467c-92d1-bffb6a50c450] Took 1.02 seconds to deallocate network for instance. [ 813.094465] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.119618] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 813.231997] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.638635] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.734225] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Releasing lock "refresh_cache-21c83740-56b6-4cc8-b97b-2b7a00380b91" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.734510] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 813.734728] env[61972]: DEBUG nova.compute.manager [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 813.734943] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.749026] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.824682] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a2ea76-7d46-453b-8c20-12d0bd941663 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.832282] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df45e33b-9fa2-43a7-a5df-7e25d3644465 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.861335] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a759cb-51d8-47e3-9d94-69f9d79c05a9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.868053] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680659bb-e516-484a-8795-2917a36acc54 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.880449] env[61972]: DEBUG nova.compute.provider_tree [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.900036] env[61972]: INFO nova.scheduler.client.report [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Deleted allocations for instance b986f147-a782-467c-92d1-bffb6a50c450 [ 814.253032] env[61972]: DEBUG nova.network.neutron [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.384472] env[61972]: DEBUG nova.scheduler.client.report [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 814.410846] env[61972]: DEBUG oslo_concurrency.lockutils [None req-296ad947-f92c-4c1f-9eb3-a7fab6c85494 tempest-AttachInterfacesTestJSON-1708409081 tempest-AttachInterfacesTestJSON-1708409081-project-member] Lock "b986f147-a782-467c-92d1-bffb6a50c450" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 165.662s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.755771] env[61972]: INFO nova.compute.manager [None 
req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] [instance: 21c83740-56b6-4cc8-b97b-2b7a00380b91] Took 1.02 seconds to deallocate network for instance. [ 814.889523] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.890060] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 814.892671] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.484s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.914056] env[61972]: DEBUG nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 815.397572] env[61972]: DEBUG nova.compute.utils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 815.402327] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 815.402544] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 815.434800] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.485782] env[61972]: DEBUG nova.policy [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '232b9128bfd94a52a5f9e340e821a0b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f4e99fb03d94876b605badde8dee459', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 815.696467] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7178753f-dbf1-482a-8829-c8c337e3ea8a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.704064] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583ec3ad-8d3f-4128-bbf4-bfef4ddadf85 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.736196] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab556c16-bf69-429a-9943-8277f27ea6ea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.743851] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e67507-c6ab-44e2-aff6-858874f13a3d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.758822] env[61972]: DEBUG nova.compute.provider_tree [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.772709] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Successfully created port: d6f761a0-58d8-4194-a7aa-aebf39902f1b {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.785893] env[61972]: INFO nova.scheduler.client.report [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 
tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Deleted allocations for instance 21c83740-56b6-4cc8-b97b-2b7a00380b91 [ 815.903368] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 816.265501] env[61972]: DEBUG nova.scheduler.client.report [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 816.298096] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9299c6d2-d04a-4fda-8be6-8bc343ccb8a6 tempest-InstanceActionsTestJSON-1028660373 tempest-InstanceActionsTestJSON-1028660373-project-member] Lock "21c83740-56b6-4cc8-b97b-2b7a00380b91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 167.497s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.746341] env[61972]: DEBUG nova.compute.manager [req-7a3696f0-604b-49b0-b6dc-7677f8e126db req-6b2152b3-d3c7-4fb6-94cc-f96806c6130e service nova] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Received event network-changed-d6f761a0-58d8-4194-a7aa-aebf39902f1b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 816.746341] env[61972]: DEBUG nova.compute.manager [req-7a3696f0-604b-49b0-b6dc-7677f8e126db req-6b2152b3-d3c7-4fb6-94cc-f96806c6130e service nova] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Refreshing instance network info cache due to event network-changed-d6f761a0-58d8-4194-a7aa-aebf39902f1b. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 816.746630] env[61972]: DEBUG oslo_concurrency.lockutils [req-7a3696f0-604b-49b0-b6dc-7677f8e126db req-6b2152b3-d3c7-4fb6-94cc-f96806c6130e service nova] Acquiring lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.746733] env[61972]: DEBUG oslo_concurrency.lockutils [req-7a3696f0-604b-49b0-b6dc-7677f8e126db req-6b2152b3-d3c7-4fb6-94cc-f96806c6130e service nova] Acquired lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.746894] env[61972]: DEBUG nova.network.neutron [req-7a3696f0-604b-49b0-b6dc-7677f8e126db req-6b2152b3-d3c7-4fb6-94cc-f96806c6130e service nova] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Refreshing network info cache for port d6f761a0-58d8-4194-a7aa-aebf39902f1b {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.769781] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.877s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.770924] env[61972]: ERROR nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. 
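
The "compute_resources" lock lines above ("acquired by ... :: waited 16.484s", "released ... :: held 1.877s", pointing at the inner wrapper in oslo_concurrency/lockutils.py) come from oslo.concurrency's named in-process locks. A minimal sketch of taking the same kind of lock; the function below is illustrative only, not Nova's resource-tracker code:

    from oslo_concurrency import lockutils

    # Illustrative only: serialize work on a shared resource under the same
    # named, in-process lock used by the resource tracker ("compute_resources").
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs while the lock is held; the decorator's inner wrapper emits the
        # 'Lock "..." acquired by "..." :: waited Ns' and 'released ... held Ns'
        # DEBUG lines seen in the log above.
        pass

    claim_resources()
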
[ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Traceback (most recent call last): [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self.driver.spawn(context, instance, image_meta, [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self._vmops.spawn(context, instance, image_meta, injected_files, [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] vm_ref = self.build_virtual_machine(instance, [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] vif_infos = vmwarevif.get_vif_info(self._session, [ 816.770924] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] for vif in network_info: [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] return self._sync_wrapper(fn, *args, **kwargs) [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self.wait() [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self[:] = self._gt.wait() [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] return self._exit_event.wait() [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] result = hub.switch() [ 816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
816.771601] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] return self.greenlet.switch() [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] result = function(*args, **kwargs) [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] return func(*args, **kwargs) [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] raise e [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] nwinfo = self.network_api.allocate_for_instance( [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] created_port_ids = self._update_ports_for_instance( [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] with excutils.save_and_reraise_exception(): [ 816.771946] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] self.force_reraise() [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] raise self.value [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] updated_port = self._update_port( [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] _ensure_no_port_binding_failure(port) [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] raise exception.PortBindingFailed(port_id=port['id']) [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] nova.exception.PortBindingFailed: Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. [ 816.772358] env[61972]: ERROR nova.compute.manager [instance: 479b311e-e027-4724-bd8b-dffa8903b538] [ 816.772656] env[61972]: DEBUG nova.compute.utils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 816.772656] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.338s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.776185] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Build of instance 479b311e-e027-4724-bd8b-dffa8903b538 was re-scheduled: Binding failed for port 9ebe655a-cf1b-4918-b8ba-6bf0bf3def46, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 816.776632] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 816.776856] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.777012] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquired lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.777176] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 816.798833] env[61972]: DEBUG nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 816.915794] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 816.944227] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:12:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c0ef33c5-9290-494f-ab93-36c0c47bcca8',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-162642782',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 816.944487] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 816.944670] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.944872] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 816.945096] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.945280] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 816.945495] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 816.945653] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 816.945816] env[61972]: DEBUG 
nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 816.945973] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 816.946156] env[61972]: DEBUG nova.virt.hardware [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 816.947026] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e43186-e6e9-4e4d-a918-181c09f14f34 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.955368] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce286456-e7ec-442e-8fd6-e8b161630952 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.059421] env[61972]: ERROR nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. 
[ 817.059421] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 817.059421] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 817.059421] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 817.059421] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 817.059421] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 817.059421] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 817.059421] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 817.059421] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.059421] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 817.059421] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.059421] env[61972]: ERROR nova.compute.manager raise self.value [ 817.059421] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 817.059421] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 817.059421] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.059421] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 817.059856] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 817.059856] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 817.059856] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. 
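
Every failed boot in this stretch of the log bottoms out in the same two spots in nova/network/neutron.py: _ensure_no_port_binding_failure() raising PortBindingFailed, which is then re-raised through oslo.utils' save_and_reraise_exception() (the __exit__ -> force_reraise() -> raise self.value frames). A minimal, self-contained sketch of that pattern; the function names are taken from the traceback, but the bodies and the binding check are simplified stand-ins, not the actual Nova source:

    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed, using the message
        # format seen in the log above.
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # Simplified: assume Neutron reports a failed binding on the port
        # itself; the exact field checked is an assumption for illustration.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def _update_ports_for_instance(ports):
        created_port_ids = []
        for port in ports:
            try:
                _ensure_no_port_binding_failure(port)
                created_port_ids.append(port['id'])
            except Exception:
                # Clean up, then let the context manager re-raise the original
                # exception -- this is why the traceback above shows
                # __exit__() -> force_reraise() -> raise self.value.
                with excutils.save_and_reraise_exception():
                    created_port_ids.clear()  # stand-in for port cleanup
        return created_port_ids

    # Reproduces the failure mode seen above:
    # _update_ports_for_instance(
    #     [{'id': 'd6f761a0-58d8-4194-a7aa-aebf39902f1b',
    #       'binding:vif_type': 'binding_failed'}])
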
[ 817.059856] env[61972]: ERROR nova.compute.manager [ 817.059856] env[61972]: Traceback (most recent call last): [ 817.059856] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 817.059856] env[61972]: listener.cb(fileno) [ 817.059856] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 817.059856] env[61972]: result = function(*args, **kwargs) [ 817.059856] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 817.059856] env[61972]: return func(*args, **kwargs) [ 817.059856] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 817.059856] env[61972]: raise e [ 817.059856] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 817.059856] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 817.059856] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 817.059856] env[61972]: created_port_ids = self._update_ports_for_instance( [ 817.059856] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 817.059856] env[61972]: with excutils.save_and_reraise_exception(): [ 817.059856] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.059856] env[61972]: self.force_reraise() [ 817.059856] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.059856] env[61972]: raise self.value [ 817.059856] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 817.059856] env[61972]: updated_port = self._update_port( [ 817.059856] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.059856] env[61972]: _ensure_no_port_binding_failure(port) [ 817.059856] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 817.059856] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 817.060714] env[61972]: nova.exception.PortBindingFailed: Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. [ 817.060714] env[61972]: Removing descriptor: 19 [ 817.060714] env[61972]: ERROR nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. 
[ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Traceback (most recent call last): [ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] yield resources [ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self.driver.spawn(context, instance, image_meta, [ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 817.060714] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] vm_ref = self.build_virtual_machine(instance, [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] vif_infos = vmwarevif.get_vif_info(self._session, [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] for vif in network_info: [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] return self._sync_wrapper(fn, *args, **kwargs) [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self.wait() [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self[:] = self._gt.wait() [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] return self._exit_event.wait() [ 817.061188] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 817.061599] env[61972]: ERROR 
nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] result = hub.switch() [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] return self.greenlet.switch() [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] result = function(*args, **kwargs) [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] return func(*args, **kwargs) [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] raise e [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] nwinfo = self.network_api.allocate_for_instance( [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 817.061599] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] created_port_ids = self._update_ports_for_instance( [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] with excutils.save_and_reraise_exception(): [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self.force_reraise() [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] raise self.value [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] updated_port = self._update_port( [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.062076] 
env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] _ensure_no_port_binding_failure(port) [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 817.062076] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] raise exception.PortBindingFailed(port_id=port['id']) [ 817.062403] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] nova.exception.PortBindingFailed: Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. [ 817.062403] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] [ 817.062403] env[61972]: INFO nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Terminating instance [ 817.316446] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.443817] env[61972]: DEBUG nova.network.neutron [req-7a3696f0-604b-49b0-b6dc-7677f8e126db req-6b2152b3-d3c7-4fb6-94cc-f96806c6130e service nova] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.515531] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.564939] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.593073] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a72aca-b3c5-4db5-9a6c-259cf0f530d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.604164] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cecc8f-408d-4b4c-8d00-2287006cc838 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.637202] env[61972]: DEBUG nova.network.neutron [req-7a3696f0-604b-49b0-b6dc-7677f8e126db req-6b2152b3-d3c7-4fb6-94cc-f96806c6130e service nova] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.639057] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02334955-1053-4812-8c72-0b77009a19a4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.647111] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae6f7d5-07e4-4c07-9277-6b08603326d5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.661894] env[61972]: DEBUG nova.compute.provider_tree [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.663911] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.143179] env[61972]: DEBUG oslo_concurrency.lockutils [req-7a3696f0-604b-49b0-b6dc-7677f8e126db req-6b2152b3-d3c7-4fb6-94cc-f96806c6130e service nova] Releasing lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.143532] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquired lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.143731] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 
tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 818.166530] env[61972]: DEBUG nova.scheduler.client.report [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 818.170898] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Releasing lock "refresh_cache-479b311e-e027-4724-bd8b-dffa8903b538" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.171121] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 818.171304] env[61972]: DEBUG nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 818.171466] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 818.199741] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.678201] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.903s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.678201] env[61972]: ERROR nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. [ 818.678201] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Traceback (most recent call last): [ 818.678201] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 818.678201] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self.driver.spawn(context, instance, image_meta, [ 818.678201] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 818.678201] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 818.678201] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 818.678201] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] vm_ref = self.build_virtual_machine(instance, [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] vif_infos = vmwarevif.get_vif_info(self._session, [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] for vif in network_info: [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] return self._sync_wrapper(fn, *args, **kwargs) [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self.wait() [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 818.679035] env[61972]: ERROR 
nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self[:] = self._gt.wait() [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] return self._exit_event.wait() [ 818.679035] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] result = hub.switch() [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] return self.greenlet.switch() [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] result = function(*args, **kwargs) [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] return func(*args, **kwargs) [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] raise e [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] nwinfo = self.network_api.allocate_for_instance( [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.679445] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] created_port_ids = self._update_ports_for_instance( [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] with excutils.save_and_reraise_exception(): [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] self.force_reraise() [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] raise self.value [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] updated_port = self._update_port( [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] _ensure_no_port_binding_failure(port) [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.679751] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] raise exception.PortBindingFailed(port_id=port['id']) [ 818.680043] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] nova.exception.PortBindingFailed: Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. [ 818.680043] env[61972]: ERROR nova.compute.manager [instance: 47bd9677-375a-413b-a5c5-989d491adec9] [ 818.680043] env[61972]: DEBUG nova.compute.utils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 818.681381] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.683088] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.259s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.686087] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Build of instance 47bd9677-375a-413b-a5c5-989d491adec9 was re-scheduled: Binding failed for port 1cb467c5-dc03-4a5b-b564-db26df5763a3, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 818.686544] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 818.686764] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquiring lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.686906] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Acquired lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.687156] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 818.703140] env[61972]: DEBUG nova.network.neutron [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.799437] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.816393] env[61972]: DEBUG nova.compute.manager [req-131499a1-82b2-4792-ab74-748a0aac5d0f req-5659328f-18d2-4de2-8b7c-250c62b5f4f8 service nova] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Received event network-vif-deleted-d6f761a0-58d8-4194-a7aa-aebf39902f1b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 819.205512] env[61972]: INFO nova.compute.manager [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 479b311e-e027-4724-bd8b-dffa8903b538] Took 1.03 seconds to deallocate network for instance. [ 819.223873] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.302081] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Releasing lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.302528] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 819.302753] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.303042] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6193baa-331d-4744-bb5e-6473f6bf72a7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.312701] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3e364c-d3c1-4287-ae5e-8f468943ebaf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.340276] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 49cd5798-1f76-4690-bea7-cebd98a84f5c could not be found. [ 819.340276] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 819.340276] env[61972]: INFO nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 819.341149] env[61972]: DEBUG oslo.service.loopingcall [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.342274] env[61972]: DEBUG nova.compute.manager [-] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 819.342274] env[61972]: DEBUG nova.network.neutron [-] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 819.393649] env[61972]: DEBUG nova.network.neutron [-] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.401294] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.506579] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f2586e-d80d-4d12-bfa2-d77c4049b9fc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.514030] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac546c7c-add5-423d-b775-213b37fabd22 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.546076] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42844bf-2b91-48e8-920d-af1ecaf09e6a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.553535] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba171023-3f3e-48b7-bb6c-8e6378c8570d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.567876] env[61972]: DEBUG nova.compute.provider_tree [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.895599] env[61972]: DEBUG nova.network.neutron [-] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.904138] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Releasing lock "refresh_cache-47bd9677-375a-413b-a5c5-989d491adec9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.904436] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Virt driver does not provide unplug_vifs method, so it is not possible 
determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 819.904670] env[61972]: DEBUG nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 819.904877] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 819.942553] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.072108] env[61972]: DEBUG nova.scheduler.client.report [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 820.243016] env[61972]: INFO nova.scheduler.client.report [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Deleted allocations for instance 479b311e-e027-4724-bd8b-dffa8903b538 [ 820.398441] env[61972]: INFO nova.compute.manager [-] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Took 1.06 seconds to deallocate network for instance. 
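[editor note] The inventory data logged above for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 can be read with placement's capacity formula, capacity = (total - reserved) * allocation_ratio (max_unit only caps the size of a single allocation). A minimal illustrative sketch, assuming the dict values copied from the log line; effective_capacity() is a hypothetical helper, not a Nova or placement API:

    # Illustrative sketch: schedulable capacity from the logged inventory,
    # using placement's formula: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # hypothetical helper for illustration only
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
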
[ 820.400691] env[61972]: DEBUG nova.compute.claims [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 820.400871] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.445354] env[61972]: DEBUG nova.network.neutron [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.578569] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.895s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.579248] env[61972]: ERROR nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. 
[ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] Traceback (most recent call last): [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self.driver.spawn(context, instance, image_meta, [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self._vmops.spawn(context, instance, image_meta, injected_files, [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] vm_ref = self.build_virtual_machine(instance, [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] vif_infos = vmwarevif.get_vif_info(self._session, [ 820.579248] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] for vif in network_info: [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] return self._sync_wrapper(fn, *args, **kwargs) [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self.wait() [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self[:] = self._gt.wait() [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] return self._exit_event.wait() [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] result = hub.switch() [ 820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
820.579580] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] return self.greenlet.switch() [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] result = function(*args, **kwargs) [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] return func(*args, **kwargs) [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] raise e [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] nwinfo = self.network_api.allocate_for_instance( [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] created_port_ids = self._update_ports_for_instance( [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] with excutils.save_and_reraise_exception(): [ 820.580252] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] self.force_reraise() [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] raise self.value [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] updated_port = self._update_port( [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] _ensure_no_port_binding_failure(port) [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] raise exception.PortBindingFailed(port_id=port['id']) [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] nova.exception.PortBindingFailed: Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. [ 820.580556] env[61972]: ERROR nova.compute.manager [instance: a5a78743-e155-4ded-854e-822976192097] [ 820.580823] env[61972]: DEBUG nova.compute.utils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 820.581553] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.960s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.582951] env[61972]: INFO nova.compute.claims [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.587012] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Build of instance a5a78743-e155-4ded-854e-822976192097 was re-scheduled: Binding failed for port 8cabbfc8-9c80-40c9-b148-797be58447c5, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 820.588733] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 820.588973] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquiring lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.589132] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Acquired lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.589291] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.749920] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3df7de5b-f69d-4a13-bded-f34b3bd95bb4 tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "479b311e-e027-4724-bd8b-dffa8903b538" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 171.699s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.948600] env[61972]: INFO nova.compute.manager [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] [instance: 47bd9677-375a-413b-a5c5-989d491adec9] Took 1.04 seconds to deallocate network for instance. [ 821.114318] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.227422] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.258170] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 821.730653] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Releasing lock "refresh_cache-a5a78743-e155-4ded-854e-822976192097" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.730717] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 821.730946] env[61972]: DEBUG nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 821.731185] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 821.760011] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.778482] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.821275] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquiring lock "b9726bf4-a4b1-4b22-840f-98157d0d790c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.821700] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "b9726bf4-a4b1-4b22-840f-98157d0d790c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.896964] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c46bf7-4a5f-434b-8dbb-6ac3c6d9e008 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.904979] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1acac1-f8a2-46a6-8df5-2d65fc524551 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.939259] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66f0d11-9d61-4f62-9926-84a9e3a2f792 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.948670] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a882d6b-186c-4af9-a01a-2a7dcbefa0dc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.967440] env[61972]: DEBUG nova.compute.provider_tree [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.988412] env[61972]: INFO nova.scheduler.client.report [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Deleted allocations for instance 47bd9677-375a-413b-a5c5-989d491adec9 [ 822.264168] env[61972]: DEBUG nova.network.neutron [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 822.474520] env[61972]: DEBUG nova.scheduler.client.report [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 822.496648] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5505eb91-9f71-4376-b8a1-eaf140e71c1a tempest-ListServerFiltersTestJSON-1433027604 tempest-ListServerFiltersTestJSON-1433027604-project-member] Lock "47bd9677-375a-413b-a5c5-989d491adec9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 172.490s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.767634] env[61972]: INFO nova.compute.manager [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] [instance: a5a78743-e155-4ded-854e-822976192097] Took 1.04 seconds to deallocate network for instance. [ 822.982018] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.982018] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 822.987458] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.454s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.989216] env[61972]: INFO nova.compute.claims [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.002147] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 823.192109] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquiring lock "9a0463a0-dc96-41b1-8415-22011644ac0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.192385] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "9a0463a0-dc96-41b1-8415-22011644ac0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.495342] env[61972]: DEBUG nova.compute.utils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 823.497732] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 823.497909] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.528277] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.560615] env[61972]: DEBUG nova.policy [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4afa55e737b8432eb411261fd5a9bfdb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5413dae55db47e49e9a9f7966514683', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 823.805041] env[61972]: INFO nova.scheduler.client.report [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Deleted allocations for instance a5a78743-e155-4ded-854e-822976192097 [ 823.978362] env[61972]: DEBUG nova.network.neutron [None 
req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Successfully created port: 440d6213-231d-4449-bc26-8cac897fd0da {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.000988] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 824.317502] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1613badc-23b7-4831-ba42-ce92a600f3d7 tempest-ServersTestMultiNic-1607319719 tempest-ServersTestMultiNic-1607319719-project-member] Lock "a5a78743-e155-4ded-854e-822976192097" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 171.009s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.341191] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f289c6-0abc-4b2f-a0e5-7fb45689972c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.349858] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4084ca48-d622-4cd2-b2d2-a9849a70ca3f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.381260] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746c8db5-e084-4d6f-bce2-5776b9f5b908 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.389104] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9d0ec4-94ea-49a4-b661-cfed2da66dab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.403910] env[61972]: DEBUG nova.compute.provider_tree [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.823615] env[61972]: DEBUG nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 824.906845] env[61972]: DEBUG nova.scheduler.client.report [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 825.026054] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 825.056515] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 825.056789] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 825.056973] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.057206] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 825.057367] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.057534] env[61972]: DEBUG nova.virt.hardware 
[None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 825.057772] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 825.057960] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 825.058310] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 825.058527] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 825.058741] env[61972]: DEBUG nova.virt.hardware [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 825.060156] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee30b405-ec26-4412-aebd-8fe16889c0a5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.068718] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e080c1c4-b777-4b07-ab50-2b2610dd21ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.192403] env[61972]: DEBUG nova.compute.manager [req-641bf4db-783f-4308-9505-394d5bd28232 req-93ae4d89-53c5-497d-a2cc-3a8eee4a01b8 service nova] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Received event network-changed-440d6213-231d-4449-bc26-8cac897fd0da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 825.192654] env[61972]: DEBUG nova.compute.manager [req-641bf4db-783f-4308-9505-394d5bd28232 req-93ae4d89-53c5-497d-a2cc-3a8eee4a01b8 service nova] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Refreshing instance network info cache due to event network-changed-440d6213-231d-4449-bc26-8cac897fd0da. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 825.193057] env[61972]: DEBUG oslo_concurrency.lockutils [req-641bf4db-783f-4308-9505-394d5bd28232 req-93ae4d89-53c5-497d-a2cc-3a8eee4a01b8 service nova] Acquiring lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.193175] env[61972]: DEBUG oslo_concurrency.lockutils [req-641bf4db-783f-4308-9505-394d5bd28232 req-93ae4d89-53c5-497d-a2cc-3a8eee4a01b8 service nova] Acquired lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.193298] env[61972]: DEBUG nova.network.neutron [req-641bf4db-783f-4308-9505-394d5bd28232 req-93ae4d89-53c5-497d-a2cc-3a8eee4a01b8 service nova] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Refreshing network info cache for port 440d6213-231d-4449-bc26-8cac897fd0da {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.357013] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.417838] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.418406] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 825.421657] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.121s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.539772] env[61972]: ERROR nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. 
[ 825.539772] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 825.539772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 825.539772] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 825.539772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 825.539772] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 825.539772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 825.539772] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 825.539772] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.539772] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 825.539772] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.539772] env[61972]: ERROR nova.compute.manager raise self.value [ 825.539772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 825.539772] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 825.539772] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.539772] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 825.540308] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.540308] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 825.540308] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. 
[ 825.540308] env[61972]: ERROR nova.compute.manager [ 825.540433] env[61972]: Traceback (most recent call last): [ 825.540433] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 825.540433] env[61972]: listener.cb(fileno) [ 825.540433] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 825.540433] env[61972]: result = function(*args, **kwargs) [ 825.540433] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 825.540433] env[61972]: return func(*args, **kwargs) [ 825.540433] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 825.540433] env[61972]: raise e [ 825.540433] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 825.540433] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 825.540433] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 825.540433] env[61972]: created_port_ids = self._update_ports_for_instance( [ 825.540433] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 825.540433] env[61972]: with excutils.save_and_reraise_exception(): [ 825.540433] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.540433] env[61972]: self.force_reraise() [ 825.540433] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.540433] env[61972]: raise self.value [ 825.540433] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 825.540433] env[61972]: updated_port = self._update_port( [ 825.540433] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.540433] env[61972]: _ensure_no_port_binding_failure(port) [ 825.540433] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.540433] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 825.541086] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. [ 825.541086] env[61972]: Removing descriptor: 21 [ 825.541735] env[61972]: ERROR nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. 
[ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Traceback (most recent call last): [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] yield resources [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self.driver.spawn(context, instance, image_meta, [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self._vmops.spawn(context, instance, image_meta, injected_files, [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] vm_ref = self.build_virtual_machine(instance, [ 825.541735] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] vif_infos = vmwarevif.get_vif_info(self._session, [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] for vif in network_info: [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] return self._sync_wrapper(fn, *args, **kwargs) [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self.wait() [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self[:] = self._gt.wait() [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] return self._exit_event.wait() [ 825.542488] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 825.542488] env[61972]: ERROR 
nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] result = hub.switch() [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] return self.greenlet.switch() [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] result = function(*args, **kwargs) [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] return func(*args, **kwargs) [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] raise e [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] nwinfo = self.network_api.allocate_for_instance( [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] created_port_ids = self._update_ports_for_instance( [ 825.542814] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] with excutils.save_and_reraise_exception(): [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self.force_reraise() [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] raise self.value [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] updated_port = self._update_port( [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.543220] 
env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] _ensure_no_port_binding_failure(port) [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] raise exception.PortBindingFailed(port_id=port['id']) [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] nova.exception.PortBindingFailed: Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. [ 825.543220] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] [ 825.543544] env[61972]: INFO nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Terminating instance [ 825.715719] env[61972]: DEBUG nova.network.neutron [req-641bf4db-783f-4308-9505-394d5bd28232 req-93ae4d89-53c5-497d-a2cc-3a8eee4a01b8 service nova] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.933981] env[61972]: DEBUG nova.network.neutron [req-641bf4db-783f-4308-9505-394d5bd28232 req-93ae4d89-53c5-497d-a2cc-3a8eee4a01b8 service nova] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.944019] env[61972]: DEBUG nova.compute.utils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 825.944019] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 825.944019] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 825.992775] env[61972]: DEBUG nova.policy [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3254aac3e99d474e95798cb85f2bf5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9266fa0d01664ba4a80ff4068cb9b9bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 826.047728] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Acquiring lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.241968] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef43339-d986-444c-bad7-bf2fde311633 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.249717] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc5d81f-9a02-46d3-b86c-ede27fe20cbe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.284140] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b14c93-05dd-4c3b-9f6a-e95d2aaaaa01 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.292448] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da09da8-3949-4e1b-acc9-2665e845bf88 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.308956] env[61972]: DEBUG nova.compute.provider_tree [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.324320] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Successfully created port: 5242c0c7-f0be-48bd-9cdb-facff0c96198 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.443209] env[61972]: DEBUG oslo_concurrency.lockutils [req-641bf4db-783f-4308-9505-394d5bd28232 
req-93ae4d89-53c5-497d-a2cc-3a8eee4a01b8 service nova] Releasing lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.447018] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Acquired lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.447018] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.447018] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 826.813149] env[61972]: DEBUG nova.scheduler.client.report [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 826.981324] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.129718] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.216613] env[61972]: DEBUG nova.compute.manager [req-2b97c502-a797-425d-a97d-476cf2a4a70e req-5128a842-34c0-4e90-9da1-b60409c564ff service nova] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Received event network-vif-deleted-440d6213-231d-4449-bc26-8cac897fd0da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 827.325770] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.904s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.326775] env[61972]: ERROR nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Traceback (most recent call last): [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self.driver.spawn(context, instance, image_meta, [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] vm_ref = self.build_virtual_machine(instance, [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.326775] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] for vif in network_info: [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 
67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] return self._sync_wrapper(fn, *args, **kwargs) [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self.wait() [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self[:] = self._gt.wait() [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] return self._exit_event.wait() [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] result = hub.switch() [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 827.327130] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] return self.greenlet.switch() [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] result = function(*args, **kwargs) [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] return func(*args, **kwargs) [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] raise e [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] nwinfo = self.network_api.allocate_for_instance( [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] created_port_ids = self._update_ports_for_instance( [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File 
"/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] with excutils.save_and_reraise_exception(): [ 827.327435] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] self.force_reraise() [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] raise self.value [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] updated_port = self._update_port( [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] _ensure_no_port_binding_failure(port) [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] raise exception.PortBindingFailed(port_id=port['id']) [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] nova.exception.PortBindingFailed: Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. [ 827.327737] env[61972]: ERROR nova.compute.manager [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] [ 827.328229] env[61972]: DEBUG nova.compute.utils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. 
{{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 827.332090] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.692s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.333107] env[61972]: INFO nova.compute.claims [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.338031] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Build of instance 67ecabfd-4efc-4e1c-a708-107197cfd018 was re-scheduled: Binding failed for port 63240ad1-b41a-445d-8fa3-266e0b7628a6, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 827.338809] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 827.343404] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Acquiring lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.343404] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Acquired lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.343538] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.459783] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 827.492888] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.493171] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.493354] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.493542] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.493858] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.493858] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.494795] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.495547] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.495547] 
env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.495547] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.495756] env[61972]: DEBUG nova.virt.hardware [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.496568] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f41bdb9-d2b4-46c9-b5e2-d79f8933858a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.505261] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd59609-2560-4550-a1de-aa922ca75229 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.632964] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Releasing lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.633933] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 827.633933] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.634075] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e93278b0-446a-4a68-a2ff-6b2f3c94f8ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.646734] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9e9e8a-2587-4643-986f-788169fcaa03 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.672149] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a9a51b5-a8a5-4bda-a36c-682758f50745 could not be found. [ 827.672392] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 827.672632] env[61972]: INFO nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Took 0.04 seconds to destroy the instance on the hypervisor. [ 827.672897] env[61972]: DEBUG oslo.service.loopingcall [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.673150] env[61972]: DEBUG nova.compute.manager [-] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 827.673245] env[61972]: DEBUG nova.network.neutron [-] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 827.698150] env[61972]: DEBUG nova.network.neutron [-] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.710632] env[61972]: ERROR nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. 
[ 827.710632] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 827.710632] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 827.710632] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 827.710632] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.710632] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 827.710632] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.710632] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 827.710632] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.710632] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 827.710632] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.710632] env[61972]: ERROR nova.compute.manager raise self.value [ 827.710632] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.710632] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 827.710632] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.710632] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 827.711076] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.711076] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 827.711076] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. 
[ 827.711076] env[61972]: ERROR nova.compute.manager [ 827.711076] env[61972]: Traceback (most recent call last): [ 827.711076] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 827.711076] env[61972]: listener.cb(fileno) [ 827.711076] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.711076] env[61972]: result = function(*args, **kwargs) [ 827.711076] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.711076] env[61972]: return func(*args, **kwargs) [ 827.711076] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 827.711076] env[61972]: raise e [ 827.711076] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 827.711076] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 827.711076] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.711076] env[61972]: created_port_ids = self._update_ports_for_instance( [ 827.711076] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.711076] env[61972]: with excutils.save_and_reraise_exception(): [ 827.711076] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.711076] env[61972]: self.force_reraise() [ 827.711076] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.711076] env[61972]: raise self.value [ 827.711076] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.711076] env[61972]: updated_port = self._update_port( [ 827.711076] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.711076] env[61972]: _ensure_no_port_binding_failure(port) [ 827.711076] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.711076] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 827.711802] env[61972]: nova.exception.PortBindingFailed: Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. [ 827.711802] env[61972]: Removing descriptor: 21 [ 827.711802] env[61972]: ERROR nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. 
[ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Traceback (most recent call last): [ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] yield resources [ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self.driver.spawn(context, instance, image_meta, [ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.711802] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] vm_ref = self.build_virtual_machine(instance, [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] for vif in network_info: [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] return self._sync_wrapper(fn, *args, **kwargs) [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self.wait() [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self[:] = self._gt.wait() [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] return self._exit_event.wait() [ 827.712166] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 827.712544] env[61972]: ERROR 
nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] result = hub.switch() [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] return self.greenlet.switch() [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] result = function(*args, **kwargs) [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] return func(*args, **kwargs) [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] raise e [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] nwinfo = self.network_api.allocate_for_instance( [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.712544] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] created_port_ids = self._update_ports_for_instance( [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] with excutils.save_and_reraise_exception(): [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self.force_reraise() [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] raise self.value [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] updated_port = self._update_port( [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.713014] 
env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] _ensure_no_port_binding_failure(port) [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.713014] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] raise exception.PortBindingFailed(port_id=port['id']) [ 827.713326] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] nova.exception.PortBindingFailed: Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. [ 827.713326] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] [ 827.713326] env[61972]: INFO nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Terminating instance [ 827.866972] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.976095] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.204192] env[61972]: DEBUG nova.network.neutron [-] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.216959] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.217162] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.217350] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.331057] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquiring lock "489f1de0-d1c8-4429-a6f1-24ea885282f3" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.331298] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "489f1de0-d1c8-4429-a6f1-24ea885282f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.478999] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Releasing lock "refresh_cache-67ecabfd-4efc-4e1c-a708-107197cfd018" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.478999] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 828.479159] env[61972]: DEBUG nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 828.479241] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.500060] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.641860] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1431e2b3-844e-4cba-b29b-bbe77e99186a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.649780] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4832908-e2a7-4e6e-95b6-c5bb7957a2a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.681014] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ec0695-e295-406c-9477-08d3a4756089 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.687524] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63867308-f4c4-4ebc-b816-57f5be0411db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.700340] env[61972]: DEBUG nova.compute.provider_tree [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.706389] env[61972]: INFO nova.compute.manager [-] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Took 1.03 seconds to deallocate network for instance. [ 828.708434] env[61972]: DEBUG nova.compute.claims [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 828.708611] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.747462] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.849463] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.008993] env[61972]: DEBUG nova.network.neutron [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.204068] env[61972]: DEBUG nova.scheduler.client.report [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 829.246016] env[61972]: DEBUG nova.compute.manager [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Received event network-changed-5242c0c7-f0be-48bd-9cdb-facff0c96198 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 829.246016] env[61972]: DEBUG nova.compute.manager [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Refreshing instance network info cache due to event network-changed-5242c0c7-f0be-48bd-9cdb-facff0c96198. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 829.246016] env[61972]: DEBUG oslo_concurrency.lockutils [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] Acquiring lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.352250] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.352716] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 829.352910] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 829.353240] env[61972]: DEBUG oslo_concurrency.lockutils [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] Acquired lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.353415] env[61972]: DEBUG nova.network.neutron [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Refreshing network info cache for port 5242c0c7-f0be-48bd-9cdb-facff0c96198 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.354511] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e73a073-4ba9-4698-a2fe-2ed95030039f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.368531] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53bb8c3-bada-449a-854b-4d8a3a5c2965 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.390662] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba could not be found. [ 829.391047] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.391047] env[61972]: INFO nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Took 0.04 seconds to destroy the instance on the hypervisor. [ 829.391261] env[61972]: DEBUG oslo.service.loopingcall [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.391467] env[61972]: DEBUG nova.compute.manager [-] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 829.391568] env[61972]: DEBUG nova.network.neutron [-] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.410418] env[61972]: DEBUG nova.network.neutron [-] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.509090] env[61972]: INFO nova.compute.manager [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] [instance: 67ecabfd-4efc-4e1c-a708-107197cfd018] Took 1.03 seconds to deallocate network for instance. [ 829.714840] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.714840] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 829.717635] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.283s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.719031] env[61972]: INFO nova.compute.claims [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.877061] env[61972]: DEBUG nova.network.neutron [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.912603] env[61972]: DEBUG nova.network.neutron [-] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.978986] env[61972]: DEBUG nova.network.neutron [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.223905] env[61972]: DEBUG nova.compute.utils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.227178] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 830.227353] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 830.273237] env[61972]: DEBUG nova.policy [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47ebbe5ddb8b41bbb1a54cf191aef61a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '651d8f34661542219f5451bce866ec02', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 830.415022] env[61972]: INFO nova.compute.manager [-] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Took 1.02 seconds to deallocate network for instance. 
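Annotation: the traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which converts a Neutron port whose binding failed into the PortBindingFailed build error seen for port 5242c0c7-f0be-48bd-9cdb-facff0c96198. A minimal sketch of that style of guard, assuming the port dict is the body returned by the Neutron ports API and that a failed binding is flagged via the binding:vif_type attribute (the literal value used here is an assumption, not a quote of the Nova source):

    # Sketch only: reproduces the shape of the check the traceback points at.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a binding the backend could not complete on the port
        # itself; the compute manager turns that into a failed build.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port in this state makes the guard raise, which is what the
    # traceback above shows propagating out of _update_ports_for_instance.
    # _ensure_no_port_binding_failure({'id': '5242c0c7-f0be-48bd-9cdb-facff0c96198',
    #                                  'binding:vif_type': 'binding_failed'})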
[ 830.417473] env[61972]: DEBUG nova.compute.claims [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 830.417473] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.481618] env[61972]: DEBUG oslo_concurrency.lockutils [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] Releasing lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.481618] env[61972]: DEBUG nova.compute.manager [req-885945f7-b5e9-4bb4-b925-ca4621023134 req-2eeb55df-b09f-44c6-802d-0ecc8ee2c27f service nova] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Received event network-vif-deleted-5242c0c7-f0be-48bd-9cdb-facff0c96198 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 830.541821] env[61972]: INFO nova.scheduler.client.report [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Deleted allocations for instance 67ecabfd-4efc-4e1c-a708-107197cfd018 [ 830.547943] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Successfully created port: a827c64c-a00d-4d2d-af6e-29c34ca4b899 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.727823] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 830.988489] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0113120-06c3-4b39-9e3d-c42100468f8a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.996450] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e5b173-e0a5-4311-959c-30dfcc143f11 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.026973] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6208de4-9e85-4aeb-8b2e-f8a6927d7d26 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.034507] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f927823-7185-4948-86ca-746b5e3ffa65 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.047903] env[61972]: DEBUG nova.compute.provider_tree [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.051531] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f949885c-60ac-428c-bf50-1457f93bd821 tempest-ServerRescueTestJSON-1610341930 tempest-ServerRescueTestJSON-1610341930-project-member] Lock "67ecabfd-4efc-4e1c-a708-107197cfd018" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 174.226s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.551654] env[61972]: DEBUG nova.scheduler.client.report [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 831.555039] env[61972]: DEBUG nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 831.741494] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 831.770414] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 831.770668] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 831.770824] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.770999] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 831.771199] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.771353] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 831.771562] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 831.772350] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 831.772350] env[61972]: DEBUG 
nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 831.772350] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 831.772350] env[61972]: DEBUG nova.virt.hardware [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 831.773317] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176b9918-694f-4bde-ab2f-bf023cacfc49 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.783075] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c757b880-a5ec-468b-8fa9-937b5773e7c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.059371] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.059915] env[61972]: DEBUG nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 832.065597] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.749s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.066971] env[61972]: INFO nova.compute.claims [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.093275] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.567169] env[61972]: DEBUG nova.compute.utils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 832.570117] env[61972]: DEBUG nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 832.570117] env[61972]: DEBUG nova.network.neutron [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.617528] env[61972]: DEBUG nova.policy [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6b7c5b037a54c8cbd151ad0f1875f37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbbaa322b60942819cfb147b5201daf4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 832.904812] env[61972]: DEBUG nova.network.neutron [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Successfully created port: b4dc0e54-ff1f-458b-98f8-cfddec6ef15a {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.072797] env[61972]: DEBUG nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 833.391792] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08692ffa-9be7-4f51-9394-8ef7c8ce9370 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.402190] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd80129-eadc-4a20-9461-f17e2cc3d772 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.436954] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95576df-8e92-4d72-9f2d-1fe4affe7b77 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.444259] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be0ccda-c6f2-4bde-8457-1e44e51cde06 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.458887] env[61972]: DEBUG nova.compute.provider_tree [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.962556] env[61972]: DEBUG nova.scheduler.client.report [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 834.082614] env[61972]: DEBUG nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 834.106652] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 834.106900] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 834.107072] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.107248] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 834.107393] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.107538] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 834.107743] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 834.107900] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 834.108083] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] 
Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 834.108247] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 834.108418] env[61972]: DEBUG nova.virt.hardware [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.109594] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e26ceb-70cb-49d3-8e46-412716b20f62 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.117839] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79bdde3-862f-4f19-9e76-1e6b3c932114 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.468877] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.469396] env[61972]: DEBUG nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 834.472042] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.071s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.573973] env[61972]: DEBUG nova.compute.manager [req-975f8183-0292-432d-832e-db15a56dfa6e req-4847c419-398f-4635-bb3d-91943c788a81 service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Received event network-vif-plugged-b4dc0e54-ff1f-458b-98f8-cfddec6ef15a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 834.574206] env[61972]: DEBUG oslo_concurrency.lockutils [req-975f8183-0292-432d-832e-db15a56dfa6e req-4847c419-398f-4635-bb3d-91943c788a81 service nova] Acquiring lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.574414] env[61972]: DEBUG oslo_concurrency.lockutils [req-975f8183-0292-432d-832e-db15a56dfa6e req-4847c419-398f-4635-bb3d-91943c788a81 service nova] Lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.574658] env[61972]: DEBUG oslo_concurrency.lockutils [req-975f8183-0292-432d-832e-db15a56dfa6e req-4847c419-398f-4635-bb3d-91943c788a81 service nova] Lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.574742] env[61972]: DEBUG nova.compute.manager [req-975f8183-0292-432d-832e-db15a56dfa6e req-4847c419-398f-4635-bb3d-91943c788a81 service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] No waiting events found dispatching network-vif-plugged-b4dc0e54-ff1f-458b-98f8-cfddec6ef15a {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 834.574926] env[61972]: WARNING nova.compute.manager [req-975f8183-0292-432d-832e-db15a56dfa6e req-4847c419-398f-4635-bb3d-91943c788a81 service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Received unexpected event network-vif-plugged-b4dc0e54-ff1f-458b-98f8-cfddec6ef15a for instance with vm_state building and task_state spawning. 
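Annotation: the nova.virt.hardware lines above walk from the m1.nano flavor (vcpus=1) and the effective limits of 65536 sockets, cores and threads to exactly one candidate topology. A small, self-contained re-creation of that enumeration (illustrative only, not the actual nova.virt.hardware code):

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Enumerate every (sockets, cores, threads) factorisation of the vCPU
        # count that fits inside the limits, as in "Build topologies for 1 vcpu(s)".
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    print(possible_topologies(1, 65536, 65536, 65536))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching
    #    "Got 1 possible topologies" in the log above.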
[ 834.597666] env[61972]: DEBUG nova.network.neutron [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Successfully updated port: b4dc0e54-ff1f-458b-98f8-cfddec6ef15a {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 834.976331] env[61972]: DEBUG nova.compute.utils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 834.977733] env[61972]: DEBUG nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 834.977904] env[61972]: DEBUG nova.network.neutron [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 835.018378] env[61972]: DEBUG nova.policy [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7ab1f4b73de4fda898ad236977ca5cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f437d1d2984539831da3c83b835a99', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 835.100274] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.100435] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.100597] env[61972]: DEBUG nova.network.neutron [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.216241] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385c4fc3-7085-4ade-8a2f-ad0944be6c8a {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.223704] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c148b797-7851-47b2-a265-74effb3fccc3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.252706] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37747e5-74f1-4fd3-99e6-31e630ee1ba0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.260332] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8043a8fa-600e-4fd0-b777-90cbfa32b857 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.275287] env[61972]: DEBUG nova.compute.provider_tree [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.373891] env[61972]: DEBUG nova.network.neutron [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Successfully created port: bd88da82-32a8-4336-bc68-1faad1ca062a {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.481245] env[61972]: DEBUG nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 835.640674] env[61972]: DEBUG nova.network.neutron [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.780093] env[61972]: DEBUG nova.scheduler.client.report [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 835.828250] env[61972]: DEBUG nova.network.neutron [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Updating instance_info_cache with network_info: [{"id": "b4dc0e54-ff1f-458b-98f8-cfddec6ef15a", "address": "fa:16:3e:bd:20:18", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dc0e54-ff", "ovs_interfaceid": "b4dc0e54-ff1f-458b-98f8-cfddec6ef15a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.286311] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.814s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.286962] env[61972]: ERROR nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. 
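Annotation: the inventory payload logged for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 is what the scheduler report client compares against placement before deciding nothing changed. A worked example with the numbers above, using the usual placement capacity rule of (total - reserved) * allocation_ratio (stated as background, not quoted from the log):

    # Worked example based on the inventory dict logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity {capacity:.0f}, at most {inv['max_unit']} per allocation")
    # VCPU: capacity 192, at most 16 per allocation
    # MEMORY_MB: capacity 196078, at most 65530 per allocation
    # DISK_GB: capacity 400, at most 175 per allocation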
[ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Traceback (most recent call last): [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self.driver.spawn(context, instance, image_meta, [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] vm_ref = self.build_virtual_machine(instance, [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] vif_infos = vmwarevif.get_vif_info(self._session, [ 836.286962] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] for vif in network_info: [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] return self._sync_wrapper(fn, *args, **kwargs) [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self.wait() [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self[:] = self._gt.wait() [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] return self._exit_event.wait() [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] result = hub.switch() [ 836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
836.287523] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] return self.greenlet.switch() [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] result = function(*args, **kwargs) [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] return func(*args, **kwargs) [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] raise e [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] nwinfo = self.network_api.allocate_for_instance( [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] created_port_ids = self._update_ports_for_instance( [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] with excutils.save_and_reraise_exception(): [ 836.288124] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] self.force_reraise() [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] raise self.value [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] updated_port = self._update_port( [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] _ensure_no_port_binding_failure(port) [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] raise exception.PortBindingFailed(port_id=port['id']) [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] nova.exception.PortBindingFailed: Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. [ 836.288699] env[61972]: ERROR nova.compute.manager [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] [ 836.289224] env[61972]: DEBUG nova.compute.utils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 836.289279] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.511s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.290690] env[61972]: INFO nova.compute.claims [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.294028] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Build of instance 49cd5798-1f76-4690-bea7-cebd98a84f5c was re-scheduled: Binding failed for port d6f761a0-58d8-4194-a7aa-aebf39902f1b, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 836.294589] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 836.294818] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquiring lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.294961] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Acquired lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.295138] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.329817] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.330146] env[61972]: DEBUG nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Instance network_info: |[{"id": "b4dc0e54-ff1f-458b-98f8-cfddec6ef15a", "address": "fa:16:3e:bd:20:18", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dc0e54-ff", "ovs_interfaceid": "b4dc0e54-ff1f-458b-98f8-cfddec6ef15a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 836.330939] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 
tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:20:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '838c9497-35dd-415e-96c7-8dc21b0cd4b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4dc0e54-ff1f-458b-98f8-cfddec6ef15a', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.338807] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating folder: Project (dbbaa322b60942819cfb147b5201daf4). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 836.339750] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7c529b8-c991-4d93-bc17-f4a20bfb6a3e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.352116] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Created folder: Project (dbbaa322b60942819cfb147b5201daf4) in parent group-v294799. [ 836.352310] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating folder: Instances. Parent ref: group-v294820. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 836.352610] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-609cb6c3-26c4-403e-b847-35c78831f3de {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.360711] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Created folder: Instances in parent group-v294820. [ 836.360937] env[61972]: DEBUG oslo.service.loopingcall [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.361133] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 836.361328] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d73dd58-4684-4dc4-8ec1-299273062dd3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.380879] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 836.380879] env[61972]: value = "task-1389147" [ 836.380879] env[61972]: _type = "Task" [ 836.380879] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.388547] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389147, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.491172] env[61972]: DEBUG nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 836.518733] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 836.518988] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 836.519157] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.519339] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 836.519539] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.519624] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 836.520207] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 836.520561] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 836.520782] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 836.520949] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 836.521134] env[61972]: DEBUG nova.virt.hardware [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.521976] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7640b63b-ecf0-4b02-a94d-0c9862621efa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.529830] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538dedb0-54bb-44e1-ad29-b916915a50ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.599468] env[61972]: DEBUG nova.compute.manager [req-2f308b8c-2f45-4439-b926-fb5d7444fff1 req-e8a5f9c5-da35-45a4-a91c-7bfd31b84cfe service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Received event network-changed-b4dc0e54-ff1f-458b-98f8-cfddec6ef15a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 836.599674] env[61972]: DEBUG nova.compute.manager [req-2f308b8c-2f45-4439-b926-fb5d7444fff1 req-e8a5f9c5-da35-45a4-a91c-7bfd31b84cfe service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Refreshing instance network info cache due to event network-changed-b4dc0e54-ff1f-458b-98f8-cfddec6ef15a. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 836.599887] env[61972]: DEBUG oslo_concurrency.lockutils [req-2f308b8c-2f45-4439-b926-fb5d7444fff1 req-e8a5f9c5-da35-45a4-a91c-7bfd31b84cfe service nova] Acquiring lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.600040] env[61972]: DEBUG oslo_concurrency.lockutils [req-2f308b8c-2f45-4439-b926-fb5d7444fff1 req-e8a5f9c5-da35-45a4-a91c-7bfd31b84cfe service nova] Acquired lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.600201] env[61972]: DEBUG nova.network.neutron [req-2f308b8c-2f45-4439-b926-fb5d7444fff1 req-e8a5f9c5-da35-45a4-a91c-7bfd31b84cfe service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Refreshing network info cache for port b4dc0e54-ff1f-458b-98f8-cfddec6ef15a {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.813869] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.866717] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.890534] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389147, 'name': CreateVM_Task, 'duration_secs': 0.366923} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.890707] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.897454] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.897628] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.897939] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.898198] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f67ab08-7ae0-4e9c-9f7c-6402936b41b2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.902904] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 836.902904] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5202b3db-546a-b46b-e2e1-e97b4c60b661" [ 836.902904] env[61972]: _type = "Task" [ 836.902904] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.910715] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5202b3db-546a-b46b-e2e1-e97b4c60b661, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.019970] env[61972]: DEBUG nova.network.neutron [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Successfully updated port: bd88da82-32a8-4336-bc68-1faad1ca062a {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.369119] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Releasing lock "refresh_cache-49cd5798-1f76-4690-bea7-cebd98a84f5c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.369974] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 837.369974] env[61972]: DEBUG nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 837.369974] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.388594] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.414055] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5202b3db-546a-b46b-e2e1-e97b4c60b661, 'name': SearchDatastore_Task, 'duration_secs': 0.012574} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.414368] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.414596] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.414823] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.414989] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.415193] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.415469] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4c1e20f-ae59-4121-bc20-0be7fc76d682 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.425712] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.425882] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.426618] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5f616fa-1dde-47df-ab8e-b3e94f317155 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.433968] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 837.433968] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5269d032-2810-b7e6-efe2-347c79b827bb" [ 837.433968] env[61972]: _type = "Task" [ 837.433968] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.441372] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5269d032-2810-b7e6-efe2-347c79b827bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.521521] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquiring lock "refresh_cache-1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.521682] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquired lock "refresh_cache-1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.521824] env[61972]: DEBUG nova.network.neutron [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.566514] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1f6397-a6cc-43d8-8325-e03bb6231f30 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.574866] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61dcef1-a1de-40c2-98dd-f7d3cddc086b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.579850] env[61972]: DEBUG nova.network.neutron [req-2f308b8c-2f45-4439-b926-fb5d7444fff1 req-e8a5f9c5-da35-45a4-a91c-7bfd31b84cfe service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Updated VIF entry in instance network info cache for port b4dc0e54-ff1f-458b-98f8-cfddec6ef15a. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.580189] env[61972]: DEBUG nova.network.neutron [req-2f308b8c-2f45-4439-b926-fb5d7444fff1 req-e8a5f9c5-da35-45a4-a91c-7bfd31b84cfe service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Updating instance_info_cache with network_info: [{"id": "b4dc0e54-ff1f-458b-98f8-cfddec6ef15a", "address": "fa:16:3e:bd:20:18", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dc0e54-ff", "ovs_interfaceid": "b4dc0e54-ff1f-458b-98f8-cfddec6ef15a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.606425] env[61972]: DEBUG oslo_concurrency.lockutils [req-2f308b8c-2f45-4439-b926-fb5d7444fff1 req-e8a5f9c5-da35-45a4-a91c-7bfd31b84cfe service nova] Releasing lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.607269] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f154303-5faf-45ba-8da7-a1adcb646514 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.615317] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b65067-f12f-48da-bca7-3fd707bfc8ae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.628609] env[61972]: DEBUG nova.compute.provider_tree [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.890597] env[61972]: DEBUG nova.network.neutron [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.945683] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5269d032-2810-b7e6-efe2-347c79b827bb, 'name': SearchDatastore_Task, 
'duration_secs': 0.009304} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.946490] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1ab71d0-fd64-42f9-8cc2-c804e2ed3095 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.951750] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 837.951750] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]528543d8-766f-2580-8437-69fa4da8e0e0" [ 837.951750] env[61972]: _type = "Task" [ 837.951750] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.960186] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]528543d8-766f-2580-8437-69fa4da8e0e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.061261] env[61972]: DEBUG nova.network.neutron [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.131533] env[61972]: DEBUG nova.scheduler.client.report [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 838.215924] env[61972]: DEBUG nova.network.neutron [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Updating instance_info_cache with network_info: [{"id": "bd88da82-32a8-4336-bc68-1faad1ca062a", "address": "fa:16:3e:fb:47:c0", "network": {"id": "e5010f92-1c45-4b91-86bd-46787d73f8a2", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-620457548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f437d1d2984539831da3c83b835a99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd88da82-32", "ovs_interfaceid": "bd88da82-32a8-4336-bc68-1faad1ca062a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.393395] env[61972]: INFO nova.compute.manager [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] [instance: 49cd5798-1f76-4690-bea7-cebd98a84f5c] Took 1.02 seconds to deallocate network for instance. [ 838.462171] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]528543d8-766f-2580-8437-69fa4da8e0e0, 'name': SearchDatastore_Task, 'duration_secs': 0.010194} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.462442] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.462922] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 0cd09167-2c2f-4cad-b26d-35aa208fbf79/0cd09167-2c2f-4cad-b26d-35aa208fbf79.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.463463] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59c130ed-b2a0-40da-a1a1-2172df7875ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.470415] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 838.470415] env[61972]: value = "task-1389148" [ 838.470415] env[61972]: _type = "Task" [ 838.470415] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.477707] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389148, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.633712] env[61972]: DEBUG nova.compute.manager [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Received event network-vif-plugged-bd88da82-32a8-4336-bc68-1faad1ca062a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 838.634061] env[61972]: DEBUG oslo_concurrency.lockutils [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] Acquiring lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.634402] env[61972]: DEBUG oslo_concurrency.lockutils [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] Lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.634579] env[61972]: DEBUG oslo_concurrency.lockutils [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] Lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.634777] env[61972]: DEBUG nova.compute.manager [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] No waiting events found dispatching network-vif-plugged-bd88da82-32a8-4336-bc68-1faad1ca062a {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 838.635008] env[61972]: WARNING nova.compute.manager [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Received unexpected event network-vif-plugged-bd88da82-32a8-4336-bc68-1faad1ca062a for instance with vm_state building and task_state spawning. [ 838.635192] env[61972]: DEBUG nova.compute.manager [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Received event network-changed-bd88da82-32a8-4336-bc68-1faad1ca062a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 838.635346] env[61972]: DEBUG nova.compute.manager [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Refreshing instance network info cache due to event network-changed-bd88da82-32a8-4336-bc68-1faad1ca062a. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 838.635510] env[61972]: DEBUG oslo_concurrency.lockutils [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] Acquiring lock "refresh_cache-1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.636532] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.637061] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 838.640603] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.113s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.642163] env[61972]: INFO nova.compute.claims [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.718713] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Releasing lock "refresh_cache-1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.719086] env[61972]: DEBUG nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Instance network_info: |[{"id": "bd88da82-32a8-4336-bc68-1faad1ca062a", "address": "fa:16:3e:fb:47:c0", "network": {"id": "e5010f92-1c45-4b91-86bd-46787d73f8a2", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-620457548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f437d1d2984539831da3c83b835a99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", 
"segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd88da82-32", "ovs_interfaceid": "bd88da82-32a8-4336-bc68-1faad1ca062a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 838.719395] env[61972]: DEBUG oslo_concurrency.lockutils [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] Acquired lock "refresh_cache-1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.719573] env[61972]: DEBUG nova.network.neutron [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Refreshing network info cache for port bd88da82-32a8-4336-bc68-1faad1ca062a {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.721086] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:47:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd88da82-32a8-4336-bc68-1faad1ca062a', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.729756] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Creating folder: Project (82f437d1d2984539831da3c83b835a99). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.730882] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1d8b521-cb7c-4fc5-a5da-7461ca526a12 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.741942] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Created folder: Project (82f437d1d2984539831da3c83b835a99) in parent group-v294799. [ 838.742159] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Creating folder: Instances. Parent ref: group-v294823. 
{{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.742397] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90240fde-5163-48a2-abf6-eeaf5d5a1e3f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.754969] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Created folder: Instances in parent group-v294823. [ 838.755233] env[61972]: DEBUG oslo.service.loopingcall [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.755426] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.755640] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2834b3b-8de8-45d7-8ca1-a540fe4199aa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.776052] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.776052] env[61972]: value = "task-1389151" [ 838.776052] env[61972]: _type = "Task" [ 838.776052] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.783808] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389151, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.982060] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389148, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50548} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.982385] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 0cd09167-2c2f-4cad-b26d-35aa208fbf79/0cd09167-2c2f-4cad-b26d-35aa208fbf79.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 838.982763] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 838.983067] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e6c2620-d078-4790-8f97-994f4f5595bb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.988988] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 838.988988] env[61972]: value = "task-1389152" [ 838.988988] env[61972]: _type = "Task" [ 838.988988] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.997161] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389152, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.142775] env[61972]: DEBUG nova.compute.utils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 839.144291] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 839.144478] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 839.222897] env[61972]: DEBUG nova.policy [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34838d72bc5c40e4861aeb1bc2346e0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e6f816e56de421ba4a2d7de91a6550c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 839.286173] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389151, 'name': CreateVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.428417] env[61972]: INFO nova.scheduler.client.report [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Deleted allocations for instance 49cd5798-1f76-4690-bea7-cebd98a84f5c [ 839.440831] env[61972]: DEBUG nova.network.neutron [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Updated VIF entry in instance network info cache for port bd88da82-32a8-4336-bc68-1faad1ca062a. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.441697] env[61972]: DEBUG nova.network.neutron [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Updating instance_info_cache with network_info: [{"id": "bd88da82-32a8-4336-bc68-1faad1ca062a", "address": "fa:16:3e:fb:47:c0", "network": {"id": "e5010f92-1c45-4b91-86bd-46787d73f8a2", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-620457548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f437d1d2984539831da3c83b835a99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd88da82-32", "ovs_interfaceid": "bd88da82-32a8-4336-bc68-1faad1ca062a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.499979] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389152, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065753} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.500309] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.501427] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Successfully created port: 51a88e7f-2e39-4674-9816-01cbb3bb08f1 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.505208] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27237b5f-6f15-4d08-bd55-bc0122ec9e1f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.527471] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 0cd09167-2c2f-4cad-b26d-35aa208fbf79/0cd09167-2c2f-4cad-b26d-35aa208fbf79.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.528138] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95c7622a-65e3-4ccf-9b99-bd2bfcf24961 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.546956] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 839.546956] env[61972]: value = "task-1389153" [ 839.546956] env[61972]: _type = "Task" [ 839.546956] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.554586] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389153, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.650045] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 839.786586] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389151, 'name': CreateVM_Task, 'duration_secs': 0.519253} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.786757] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.788052] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.788052] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.788052] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.788379] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e75674bc-7048-4225-bde6-906048589873 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.799292] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 839.799292] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52874fb8-4602-89e8-76fb-1142fd2e590c" [ 839.799292] env[61972]: _type = "Task" [ 839.799292] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.811101] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52874fb8-4602-89e8-76fb-1142fd2e590c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.921153] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6fded6-07c4-4f68-ab3f-29bbbe05aa0f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.928316] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91eded55-7b2b-4efb-be24-57399f015f02 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.957326] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b3ac894d-deff-47df-9fe5-4da2fa448568 tempest-MigrationsAdminTest-1934187052 tempest-MigrationsAdminTest-1934187052-project-member] Lock "49cd5798-1f76-4690-bea7-cebd98a84f5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.565s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.957749] env[61972]: DEBUG oslo_concurrency.lockutils [req-a150b120-bab1-434d-a6f7-54194f25b455 req-f94672fb-472b-4281-938a-64901a795ad2 service nova] Releasing lock "refresh_cache-1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.959932] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f8e373-a77a-4b00-9ccd-be8b61548564 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.967128] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e14b04a-f3ab-4fa9-811c-fd05896f7299 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.980405] env[61972]: DEBUG nova.compute.provider_tree [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.056991] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389153, 'name': ReconfigVM_Task, 'duration_secs': 0.301811} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.057294] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 0cd09167-2c2f-4cad-b26d-35aa208fbf79/0cd09167-2c2f-4cad-b26d-35aa208fbf79.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 840.057925] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec99338e-69de-42a8-bf57-74a5c43f5b4d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.064157] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 840.064157] env[61972]: value = "task-1389154" [ 840.064157] env[61972]: _type = "Task" [ 840.064157] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.071404] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389154, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.310406] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52874fb8-4602-89e8-76fb-1142fd2e590c, 'name': SearchDatastore_Task, 'duration_secs': 0.009821} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.310539] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.310762] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.310992] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.311378] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.311378] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.311569] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e85c5c34-24b4-4d1a-8c75-a2cd2cd93177 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.319838] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.320024] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.320827] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9357b9f2-1ffa-4397-b93d-fc7d993ad176 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.326105] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 840.326105] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5276e44d-d344-b424-bcca-6449cea1e9a2" [ 840.326105] env[61972]: _type = "Task" [ 840.326105] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.333490] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5276e44d-d344-b424-bcca-6449cea1e9a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.464170] env[61972]: DEBUG nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 840.483522] env[61972]: DEBUG nova.scheduler.client.report [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 840.575112] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389154, 'name': Rename_Task, 'duration_secs': 0.176857} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.575404] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.575638] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-259abbdd-f66e-4c11-9ad1-001854c757f7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.581978] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 840.581978] env[61972]: value = "task-1389155" [ 840.581978] env[61972]: _type = "Task" [ 840.581978] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.589510] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.663671] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 840.690213] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.690467] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.690621] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.690795] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.690937] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.695187] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.695468] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.695641] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.695812] env[61972]: DEBUG nova.virt.hardware [None 
req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.695976] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.696169] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.697025] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064a9e87-230c-4eaf-9cd2-b52d24a63f33 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.709573] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcfcac0-649b-4f45-8550-a5dc1a960c9a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.836695] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5276e44d-d344-b424-bcca-6449cea1e9a2, 'name': SearchDatastore_Task, 'duration_secs': 0.008373} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.837631] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abaa5835-2f82-4ee4-9803-2377786b27fb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.843703] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 840.843703] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e9fdd3-35ad-f854-608c-ca517b3888e8" [ 840.843703] env[61972]: _type = "Task" [ 840.843703] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.851343] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e9fdd3-35ad-f854-608c-ca517b3888e8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.987880] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.988522] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 840.991470] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.635s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.994612] env[61972]: INFO nova.compute.claims [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.000023] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.091736] env[61972]: DEBUG oslo_vmware.api [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389155, 'name': PowerOnVM_Task, 'duration_secs': 0.450983} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.091991] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 841.092572] env[61972]: INFO nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Took 7.01 seconds to spawn the instance on the hypervisor. 
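The repeated "Waiting for the task: (returnval){ ... }" and "Task: {...} progress is N%" records above are produced by oslo_vmware's task polling (wait_for_task driving _poll_task until the vCenter task finishes). Below is a minimal conceptual sketch of that polling pattern only, not the actual oslo_vmware implementation; get_task_info is a hypothetical callable standing in for the vSphere TaskInfo lookup issued on each poll.

    import time

    # Conceptual sketch of the wait_for_task/_poll_task loop seen in the log.
    # get_task_info is a hypothetical stand-in for the vSphere call that returns
    # the current task state and progress on each poll.
    def wait_for_task(get_task_info, poll_interval=0.5):
        while True:
            info = get_task_info()            # e.g. {'state': 'running', 'progress': 45}
            if info['state'] == 'success':    # logged as "... completed successfully."
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # intermediate polls are logged as "Task: {...} progress is N%."
            time.sleep(poll_interval)

Every CreateVM_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task above goes through this same loop, which is why each task shows a "progress is 0%" record followed by a "completed successfully" record carrying a duration_secs value.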
[ 841.092572] env[61972]: DEBUG nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 841.094176] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f61045b-8e0b-43db-ba32-906a4395dbc5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.252111] env[61972]: DEBUG nova.compute.manager [req-dd9b2f7d-0c4f-461b-a06f-18c3879d6b07 req-b31d31e6-364a-45e7-971b-ede67e7bc2ae service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Received event network-vif-plugged-51a88e7f-2e39-4674-9816-01cbb3bb08f1 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 841.252111] env[61972]: DEBUG oslo_concurrency.lockutils [req-dd9b2f7d-0c4f-461b-a06f-18c3879d6b07 req-b31d31e6-364a-45e7-971b-ede67e7bc2ae service nova] Acquiring lock "89cbc6ec-7546-443c-9abb-47940d223daa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.252111] env[61972]: DEBUG oslo_concurrency.lockutils [req-dd9b2f7d-0c4f-461b-a06f-18c3879d6b07 req-b31d31e6-364a-45e7-971b-ede67e7bc2ae service nova] Lock "89cbc6ec-7546-443c-9abb-47940d223daa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.252111] env[61972]: DEBUG oslo_concurrency.lockutils [req-dd9b2f7d-0c4f-461b-a06f-18c3879d6b07 req-b31d31e6-364a-45e7-971b-ede67e7bc2ae service nova] Lock "89cbc6ec-7546-443c-9abb-47940d223daa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.252111] env[61972]: DEBUG nova.compute.manager [req-dd9b2f7d-0c4f-461b-a06f-18c3879d6b07 req-b31d31e6-364a-45e7-971b-ede67e7bc2ae service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] No waiting events found dispatching network-vif-plugged-51a88e7f-2e39-4674-9816-01cbb3bb08f1 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 841.252729] env[61972]: WARNING nova.compute.manager [req-dd9b2f7d-0c4f-461b-a06f-18c3879d6b07 req-b31d31e6-364a-45e7-971b-ede67e7bc2ae service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Received unexpected event network-vif-plugged-51a88e7f-2e39-4674-9816-01cbb3bb08f1 for instance with vm_state building and task_state spawning. 
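The network-vif-plugged records above show the external-event handshake between Neutron and Nova: the service request (req-dd9b2f7d-...) delivers the event, the compute manager takes the per-instance "-events" lock, and because nothing is waiting for that event yet (the instance is still building), it logs the "Received unexpected event" warning instead of dispatching it. The following is a small illustrative sketch of that wait/pop pattern, not Nova's actual InstanceEvents class.

    import threading

    # Illustration of the pop_instance_event pattern visible in the log: an event
    # is only dispatched if a waiter was registered first; otherwise it falls into
    # the "unexpected event" case. This is a sketch, not Nova code.
    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # log: Acquiring lock "<uuid>-events"
            self._waiters = {}              # event name -> threading.Event

        def prepare(self, name):
            with self._lock:
                waiter = self._waiters[name] = threading.Event()
            return waiter                   # spawn code later calls waiter.wait()

        def pop(self, name):
            with self._lock:                # log: lock acquired / released
                waiter = self._waiters.pop(name, None)
            if waiter is None:
                print(f"Received unexpected event {name}")  # WARNING in the log
            else:
                waiter.set()                # wakes the code waiting for the VIF plug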
[ 841.311227] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Successfully updated port: 51a88e7f-2e39-4674-9816-01cbb3bb08f1 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.325183] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.325183] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.356949] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e9fdd3-35ad-f854-608c-ca517b3888e8, 'name': SearchDatastore_Task, 'duration_secs': 0.009051} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.357057] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.357298] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b/1cd50cd6-ccb2-41aa-8c24-9eabed18de6b.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.357740] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ecc39b9-9e05-469b-9299-0f7995cd5f2d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.364538] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 841.364538] env[61972]: value = "task-1389156" [ 841.364538] env[61972]: _type = "Task" [ 841.364538] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.374050] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389156, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.499571] env[61972]: DEBUG nova.compute.utils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 841.503208] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 841.503373] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 841.569148] env[61972]: DEBUG nova.policy [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34838d72bc5c40e4861aeb1bc2346e0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e6f816e56de421ba4a2d7de91a6550c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 841.616032] env[61972]: INFO nova.compute.manager [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Took 26.20 seconds to build instance. 
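Several of the requests above acquire and release locks named after the cached image path ("[datastore2] devstack-image-cache_base/79227ea9-.../....vmdk") before copying it into the instance directory. That per-image lock serializes concurrent spawns sharing one cached VMDK so only one request copies at a time; the "waited N.NNNs" and "held N.NNNs" figures show how long others blocked. A rough sketch of the pattern follows, assuming oslo.concurrency is installed; copy_fn is a hypothetical stand-in for the real CopyVirtualDisk_Task call and this is not Nova's code.

    from oslo_concurrency import lockutils

    # Sketch of the per-image cache locking pattern seen in the log: spawns that
    # use the same cached VMDK serialize on a lock named after the cache path.
    # copy_fn is a hypothetical callable; the real code issues CopyVirtualDisk_Task.
    def copy_from_image_cache(image_id, instance_path, copy_fn):
        cache_path = ("[datastore2] devstack-image-cache_base/"
                      f"{image_id}/{image_id}.vmdk")
        with lockutils.lock(cache_path):
            # log: Acquired lock "[datastore2] devstack-image-cache_base/...vmdk"
            copy_fn(cache_path, instance_path)
        # leaving the block corresponds to the "Releasing lock ..." records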
[ 841.814568] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "refresh_cache-89cbc6ec-7546-443c-9abb-47940d223daa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.814568] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "refresh_cache-89cbc6ec-7546-443c-9abb-47940d223daa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.814568] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.834036] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.834036] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 841.834036] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Rebuilding the list of instances to heal {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 841.878209] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389156, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.929665] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Successfully created port: dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.004437] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 842.118454] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a3c4b12c-0e1c-4a60-9fe8-4b909b59bb86 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.663s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.242992] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adaa462-83df-40b9-87a1-07c2e48442fb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.250893] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693cb101-c899-44df-82e6-8536bc00e821 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.283707] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79db1126-a3e4-4e32-b32e-d92255acaddc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.291129] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a66b17d-4b51-4ea0-9ff2-75c99ca478d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.304864] env[61972]: DEBUG nova.compute.provider_tree [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.339406] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 842.339572] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 842.339721] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 842.339850] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 842.339971] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Skipping network cache update for instance because it is Building. 
{{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 842.340105] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 842.340225] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 842.345663] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.376949] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389156, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51446} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.377786] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b/1cd50cd6-ccb2-41aa-8c24-9eabed18de6b.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.377786] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.377786] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5473beee-720b-432c-bbbe-091b6aa564c7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.384812] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 842.384812] env[61972]: value = "task-1389157" [ 842.384812] env[61972]: _type = "Task" [ 842.384812] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.395185] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389157, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.418355] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.418601] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquired lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.418824] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Forcefully refreshing network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 842.418998] env[61972]: DEBUG nova.objects.instance [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lazy-loading 'info_cache' on Instance uuid 0cd09167-2c2f-4cad-b26d-35aa208fbf79 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.531224] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Updating instance_info_cache with network_info: [{"id": "51a88e7f-2e39-4674-9816-01cbb3bb08f1", "address": "fa:16:3e:b3:73:20", "network": {"id": "bf233d31-2d98-4a4e-b60e-7744f1079f60", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-708354741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e6f816e56de421ba4a2d7de91a6550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51a88e7f-2e", "ovs_interfaceid": "51a88e7f-2e39-4674-9816-01cbb3bb08f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.621012] env[61972]: DEBUG nova.compute.manager [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 842.807796] env[61972]: DEBUG nova.scheduler.client.report [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 842.896167] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07415} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.896436] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.897237] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f5bf46-6a84-403e-86c1-06b35c033b4b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.919308] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b/1cd50cd6-ccb2-41aa-8c24-9eabed18de6b.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.919392] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-085bf79a-bc6e-4959-9e88-aaa8f066d5df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.942271] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 842.942271] env[61972]: value = "task-1389158" [ 842.942271] env[61972]: _type = "Task" [ 842.942271] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.952089] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389158, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.022426] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 843.025641] env[61972]: DEBUG nova.compute.manager [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 843.026560] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef78346b-d5f6-4911-8d86-9ebf386b80dc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.033943] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "refresh_cache-89cbc6ec-7546-443c-9abb-47940d223daa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.034159] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Instance network_info: |[{"id": "51a88e7f-2e39-4674-9816-01cbb3bb08f1", "address": "fa:16:3e:b3:73:20", "network": {"id": "bf233d31-2d98-4a4e-b60e-7744f1079f60", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-708354741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e6f816e56de421ba4a2d7de91a6550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51a88e7f-2e", "ovs_interfaceid": "51a88e7f-2e39-4674-9816-01cbb3bb08f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 843.037162] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:73:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98011432-48cc-4ffd-a5a8-b96d2ea4424a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51a88e7f-2e39-4674-9816-01cbb3bb08f1', 
'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 843.044460] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Creating folder: Project (9e6f816e56de421ba4a2d7de91a6550c). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 843.046552] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a3a8e66-ff21-4f70-a216-cbf4cc1a51ae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.054304] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 843.054621] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 843.054698] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.054891] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 843.055010] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.055151] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 843.055516] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 
tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 843.055580] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 843.055753] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 843.055922] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 843.056100] env[61972]: DEBUG nova.virt.hardware [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 843.057261] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de496dca-7214-43d9-82d4-81d70d73c96b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.061258] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Created folder: Project (9e6f816e56de421ba4a2d7de91a6550c) in parent group-v294799. [ 843.061471] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Creating folder: Instances. Parent ref: group-v294826. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 843.062096] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb6f31a5-a471-4a94-9c3a-961ce326e39b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.067729] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0262e6e2-9452-4b93-b838-f095520fffd1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.073155] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Created folder: Instances in parent group-v294826. 
[ 843.073377] env[61972]: DEBUG oslo.service.loopingcall [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.080671] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 843.081327] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39c0957b-beee-4ca8-82dd-299350098498 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.102980] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 843.102980] env[61972]: value = "task-1389161" [ 843.102980] env[61972]: _type = "Task" [ 843.102980] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.110115] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389161, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.142329] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.313194] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.313371] env[61972]: DEBUG nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 843.315993] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.607s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.331300] env[61972]: DEBUG nova.compute.manager [req-87192589-a7ca-4666-a0cb-c1b2156cb84c req-f4c0ef48-709e-473c-94b6-b0e165c135f0 service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Received event network-changed-51a88e7f-2e39-4674-9816-01cbb3bb08f1 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 843.331452] env[61972]: DEBUG nova.compute.manager [req-87192589-a7ca-4666-a0cb-c1b2156cb84c req-f4c0ef48-709e-473c-94b6-b0e165c135f0 service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Refreshing instance network info cache due to event network-changed-51a88e7f-2e39-4674-9816-01cbb3bb08f1. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 843.331693] env[61972]: DEBUG oslo_concurrency.lockutils [req-87192589-a7ca-4666-a0cb-c1b2156cb84c req-f4c0ef48-709e-473c-94b6-b0e165c135f0 service nova] Acquiring lock "refresh_cache-89cbc6ec-7546-443c-9abb-47940d223daa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.331819] env[61972]: DEBUG oslo_concurrency.lockutils [req-87192589-a7ca-4666-a0cb-c1b2156cb84c req-f4c0ef48-709e-473c-94b6-b0e165c135f0 service nova] Acquired lock "refresh_cache-89cbc6ec-7546-443c-9abb-47940d223daa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.332146] env[61972]: DEBUG nova.network.neutron [req-87192589-a7ca-4666-a0cb-c1b2156cb84c req-f4c0ef48-709e-473c-94b6-b0e165c135f0 service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Refreshing network info cache for port 51a88e7f-2e39-4674-9816-01cbb3bb08f1 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 843.450906] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389158, 'name': ReconfigVM_Task, 'duration_secs': 0.33367} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.451148] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b/1cd50cd6-ccb2-41aa-8c24-9eabed18de6b.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.451780] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95028538-79d6-4882-801b-38da5da75aaa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.457694] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 843.457694] env[61972]: value = "task-1389162" [ 843.457694] env[61972]: _type = "Task" [ 843.457694] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.473475] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389162, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.539425] env[61972]: INFO nova.compute.manager [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] instance snapshotting [ 843.544747] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73996f07-5e72-40e5-926d-0f5e7d8f02d3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.564874] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6315462d-9da1-402f-a817-ba5d683f48ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.617102] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389161, 'name': CreateVM_Task, 'duration_secs': 0.334961} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.617288] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 843.617971] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.618153] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.618479] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 843.618728] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23caa193-122b-4111-a563-13aeba5a6779 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.623476] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 843.623476] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f92b70-3a47-dcff-2886-75445ced7987" [ 843.623476] env[61972]: _type = "Task" [ 843.623476] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.632770] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f92b70-3a47-dcff-2886-75445ced7987, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.755581] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Successfully updated port: dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 843.821144] env[61972]: DEBUG nova.compute.utils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 843.827168] env[61972]: DEBUG nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 843.827450] env[61972]: DEBUG nova.network.neutron [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 843.872090] env[61972]: DEBUG nova.policy [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc3cd61498bc4f858a47a72f02466b3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3c052a272742808be2bcdc71d8f62f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 843.970481] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389162, 'name': Rename_Task} progress is 14%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.071757] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e2a8c5-41ba-4f65-b254-68363f6adc6b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.076887] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Creating Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 844.077495] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3cbba3a8-cc12-4398-8e38-4cbf85dd92a7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.084026] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5885542a-ee21-46a0-b51d-65d66512ba81 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.087933] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 844.087933] env[61972]: value = "task-1389163" [ 844.087933] env[61972]: _type = "Task" [ 844.087933] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.120201] env[61972]: DEBUG nova.network.neutron [req-87192589-a7ca-4666-a0cb-c1b2156cb84c req-f4c0ef48-709e-473c-94b6-b0e165c135f0 service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Updated VIF entry in instance network info cache for port 51a88e7f-2e39-4674-9816-01cbb3bb08f1. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.120586] env[61972]: DEBUG nova.network.neutron [req-87192589-a7ca-4666-a0cb-c1b2156cb84c req-f4c0ef48-709e-473c-94b6-b0e165c135f0 service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Updating instance_info_cache with network_info: [{"id": "51a88e7f-2e39-4674-9816-01cbb3bb08f1", "address": "fa:16:3e:b3:73:20", "network": {"id": "bf233d31-2d98-4a4e-b60e-7744f1079f60", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-708354741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e6f816e56de421ba4a2d7de91a6550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51a88e7f-2e", "ovs_interfaceid": "51a88e7f-2e39-4674-9816-01cbb3bb08f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.122620] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76c54fe-c89a-4e61-ba31-728a09e44ce1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.128643] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389163, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.142180] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e09137-e5f8-4f4f-81fe-54da238ada39 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.144743] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f92b70-3a47-dcff-2886-75445ced7987, 'name': SearchDatastore_Task, 'duration_secs': 0.008773} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.145069] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.145303] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 844.145523] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.145665] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.145843] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.146462] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0185ff07-f88a-4289-8d8f-cb1b9c5268dc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.156400] env[61972]: DEBUG nova.compute.provider_tree [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.163571] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.163761] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 844.165824] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17c57ebd-e659-499f-b68f-dc1b85b42cbf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.170066] env[61972]: DEBUG nova.network.neutron [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Successfully created port: f16ae0e8-600f-41e1-b72f-f6adfad3ec9e {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.173549] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 844.173549] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]522577ae-a5e6-f77b-3b80-17b97f7a19da" [ 844.173549] env[61972]: _type = "Task" [ 844.173549] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.181620] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]522577ae-a5e6-f77b-3b80-17b97f7a19da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.229929] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Updating instance_info_cache with network_info: [{"id": "b4dc0e54-ff1f-458b-98f8-cfddec6ef15a", "address": "fa:16:3e:bd:20:18", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dc0e54-ff", "ovs_interfaceid": "b4dc0e54-ff1f-458b-98f8-cfddec6ef15a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.258329] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "refresh_cache-caad50a8-e0ad-4ca9-b391-691ead1756f0" {{(pid=61972) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.258473] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "refresh_cache-caad50a8-e0ad-4ca9-b391-691ead1756f0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.258662] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.333575] env[61972]: DEBUG nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 844.468997] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389162, 'name': Rename_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.599187] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389163, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.629112] env[61972]: DEBUG oslo_concurrency.lockutils [req-87192589-a7ca-4666-a0cb-c1b2156cb84c req-f4c0ef48-709e-473c-94b6-b0e165c135f0 service nova] Releasing lock "refresh_cache-89cbc6ec-7546-443c-9abb-47940d223daa" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.663955] env[61972]: DEBUG nova.scheduler.client.report [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 844.686718] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]522577ae-a5e6-f77b-3b80-17b97f7a19da, 'name': SearchDatastore_Task, 'duration_secs': 0.01039} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.687593] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa3d331e-34b4-41e0-8038-38d6f5568f4d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.692832] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 844.692832] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52807f68-ec3a-d9f9-582b-25c562c23fe2" [ 844.692832] env[61972]: _type = "Task" [ 844.692832] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.702716] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52807f68-ec3a-d9f9-582b-25c562c23fe2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.733094] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Releasing lock "refresh_cache-0cd09167-2c2f-4cad-b26d-35aa208fbf79" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.733395] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Updated the network info_cache for instance {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 844.733658] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.733870] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.734076] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.734713] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.734713] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.734713] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.734713] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 844.734881] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.796237] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.941068] env[61972]: DEBUG nova.network.neutron [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Updating instance_info_cache with network_info: [{"id": "dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c", "address": "fa:16:3e:2f:7b:b9", "network": {"id": "bf233d31-2d98-4a4e-b60e-7744f1079f60", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-708354741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e6f816e56de421ba4a2d7de91a6550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbf9d428-9e", "ovs_interfaceid": "dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.968471] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389162, 'name': Rename_Task, 'duration_secs': 1.066778} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.968733] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.968976] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02045605-1dea-4db5-bb36-481108018bdf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.975135] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 844.975135] env[61972]: value = "task-1389164" [ 844.975135] env[61972]: _type = "Task" [ 844.975135] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.982559] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.098498] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389163, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.167614] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.851s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.168355] env[61972]: ERROR nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. 
[ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Traceback (most recent call last): [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self.driver.spawn(context, instance, image_meta, [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self._vmops.spawn(context, instance, image_meta, injected_files, [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] vm_ref = self.build_virtual_machine(instance, [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] vif_infos = vmwarevif.get_vif_info(self._session, [ 845.168355] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] for vif in network_info: [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] return self._sync_wrapper(fn, *args, **kwargs) [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self.wait() [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self[:] = self._gt.wait() [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] return self._exit_event.wait() [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] result = hub.switch() [ 845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
845.168675] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] return self.greenlet.switch() [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] result = function(*args, **kwargs) [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] return func(*args, **kwargs) [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] raise e [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] nwinfo = self.network_api.allocate_for_instance( [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] created_port_ids = self._update_ports_for_instance( [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] with excutils.save_and_reraise_exception(): [ 845.168998] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] self.force_reraise() [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] raise self.value [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] updated_port = self._update_port( [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] _ensure_no_port_binding_failure(port) [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] raise exception.PortBindingFailed(port_id=port['id']) [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] nova.exception.PortBindingFailed: Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. [ 845.169327] env[61972]: ERROR nova.compute.manager [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] [ 845.169589] env[61972]: DEBUG nova.compute.utils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 845.171259] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Build of instance 8a9a51b5-a8a5-4bda-a36c-682758f50745 was re-scheduled: Binding failed for port 440d6213-231d-4449-bc26-8cac897fd0da, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 845.171422] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 845.171672] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Acquiring lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.171800] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Acquired lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.171956] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.173190] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.756s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.204046] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 
tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52807f68-ec3a-d9f9-582b-25c562c23fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.010121} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.204046] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.204202] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 89cbc6ec-7546-443c-9abb-47940d223daa/89cbc6ec-7546-443c-9abb-47940d223daa.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 845.204359] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-720a64d3-84ed-4da0-9f9c-2321f1604cba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.212406] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 845.212406] env[61972]: value = "task-1389165" [ 845.212406] env[61972]: _type = "Task" [ 845.212406] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.220374] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.237715] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.341330] env[61972]: DEBUG nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 845.356579] env[61972]: DEBUG nova.compute.manager [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Received event network-vif-plugged-dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 845.356838] env[61972]: DEBUG oslo_concurrency.lockutils [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] Acquiring lock "caad50a8-e0ad-4ca9-b391-691ead1756f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.357095] env[61972]: DEBUG oslo_concurrency.lockutils [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] Lock "caad50a8-e0ad-4ca9-b391-691ead1756f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.357307] env[61972]: DEBUG oslo_concurrency.lockutils [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] Lock "caad50a8-e0ad-4ca9-b391-691ead1756f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.357507] env[61972]: DEBUG nova.compute.manager [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] No waiting events found dispatching network-vif-plugged-dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 845.357698] env[61972]: WARNING nova.compute.manager [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Received unexpected event network-vif-plugged-dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c for instance with vm_state building and task_state spawning. [ 845.357894] env[61972]: DEBUG nova.compute.manager [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Received event network-changed-dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 845.358218] env[61972]: DEBUG nova.compute.manager [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Refreshing instance network info cache due to event network-changed-dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 845.358499] env[61972]: DEBUG oslo_concurrency.lockutils [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] Acquiring lock "refresh_cache-caad50a8-e0ad-4ca9-b391-691ead1756f0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.366958] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 845.367204] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.367357] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.367535] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.367676] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.367823] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.368024] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.368220] env[61972]: DEBUG nova.virt.hardware [None 
req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.368331] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.368513] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.368636] env[61972]: DEBUG nova.virt.hardware [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.369693] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc70c0a3-a01e-45db-a551-ac8a6b8e1cd1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.377855] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4902743f-b6bc-4ab2-9ff7-738d551bb9bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.443492] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "refresh_cache-caad50a8-e0ad-4ca9-b391-691ead1756f0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.443907] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Instance network_info: |[{"id": "dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c", "address": "fa:16:3e:2f:7b:b9", "network": {"id": "bf233d31-2d98-4a4e-b60e-7744f1079f60", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-708354741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e6f816e56de421ba4a2d7de91a6550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbf9d428-9e", "ovs_interfaceid": "dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 845.444229] env[61972]: DEBUG oslo_concurrency.lockutils [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] Acquired lock "refresh_cache-caad50a8-e0ad-4ca9-b391-691ead1756f0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.444405] env[61972]: DEBUG nova.network.neutron [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Refreshing network info cache for port dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.445600] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:7b:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98011432-48cc-4ffd-a5a8-b96d2ea4424a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.452856] env[61972]: DEBUG oslo.service.loopingcall [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.455484] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.455925] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b49ce18-2403-41bb-b103-e77ab0bd8d6a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.476241] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.476241] env[61972]: value = "task-1389166" [ 845.476241] env[61972]: _type = "Task" [ 845.476241] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.487891] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389164, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.490645] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389166, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.598906] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389163, 'name': CreateSnapshot_Task, 'duration_secs': 1.056615} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.601704] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Created Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 845.602488] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda2c184-74e6-476d-bbc8-74428c649fbc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.683929] env[61972]: DEBUG nova.network.neutron [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Updated VIF entry in instance network info cache for port dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.684269] env[61972]: DEBUG nova.network.neutron [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Updating instance_info_cache with network_info: [{"id": "dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c", "address": "fa:16:3e:2f:7b:b9", "network": {"id": "bf233d31-2d98-4a4e-b60e-7744f1079f60", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-708354741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e6f816e56de421ba4a2d7de91a6550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbf9d428-9e", "ovs_interfaceid": "dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.701757] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.723412] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389165, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.792096] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.968735] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17750fd6-75f7-48c3-935b-18f06a739e2e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.976585] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840e3c2a-16c8-4a2f-bb19-781cad05c44d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.990711] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389164, 'name': PowerOnVM_Task} progress is 71%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.021639] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a9dcb8-d082-4649-b244-abcbe01a8deb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.029034] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389166, 'name': CreateVM_Task, 'duration_secs': 0.325445} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.030997] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 846.031757] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.031917] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.032253] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 846.033735] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d53aeb-1866-479d-bc1d-40f7d4636ff9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.039349] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c22228f5-28d5-4629-b088-785750542025 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.043675] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 846.043675] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e15f9f-f197-1b4d-c7b5-0f9d1cb36dba" [ 846.043675] env[61972]: _type = "Task" [ 846.043675] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.051727] env[61972]: DEBUG nova.compute.provider_tree [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.060869] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e15f9f-f197-1b4d-c7b5-0f9d1cb36dba, 'name': SearchDatastore_Task, 'duration_secs': 0.00987} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.061247] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.061436] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.061633] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.061773] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.061962] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.062816] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9777c62c-9d2b-40e3-90ba-67c8204c7845 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.070147] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.070330] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.071000] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3747ee7b-fbff-496a-8204-d6bb5c90870c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.076057] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 846.076057] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ac7987-2362-31ef-2e7f-3b737b3f3f37" [ 846.076057] env[61972]: _type = "Task" [ 846.076057] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.083888] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ac7987-2362-31ef-2e7f-3b737b3f3f37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.121464] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Creating linked-clone VM from snapshot {{(pid=61972) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 846.121892] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-76bc0aaf-8d5f-46b4-bc9d-aca3cdbbbadb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.131978] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 846.131978] env[61972]: value = "task-1389167" [ 846.131978] env[61972]: _type = "Task" [ 846.131978] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.138888] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389167, 'name': CloneVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.186762] env[61972]: DEBUG oslo_concurrency.lockutils [req-2588a806-72fe-47ef-91bd-9b024f7bc27c req-f89bf220-62aa-4365-877e-89783373c10d service nova] Releasing lock "refresh_cache-caad50a8-e0ad-4ca9-b391-691ead1756f0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.227880] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389165, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766523} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.227880] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 89cbc6ec-7546-443c-9abb-47940d223daa/89cbc6ec-7546-443c-9abb-47940d223daa.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 846.227880] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 846.227880] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-742171dc-a1b2-4de8-8e6b-133cf4f86a82 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.234340] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 846.234340] env[61972]: value = "task-1389168" [ 846.234340] env[61972]: _type = "Task" [ 846.234340] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.243365] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389168, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.291253] env[61972]: DEBUG nova.compute.manager [req-09a9a986-603f-4ea3-9221-1e2c7082d21e req-40555bb4-a394-4ec8-ba3c-8b2a624a432f service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Received event network-vif-plugged-f16ae0e8-600f-41e1-b72f-f6adfad3ec9e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 846.291495] env[61972]: DEBUG oslo_concurrency.lockutils [req-09a9a986-603f-4ea3-9221-1e2c7082d21e req-40555bb4-a394-4ec8-ba3c-8b2a624a432f service nova] Acquiring lock "3d424523-b45d-4174-ac7a-08fd653e314f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.291688] env[61972]: DEBUG oslo_concurrency.lockutils [req-09a9a986-603f-4ea3-9221-1e2c7082d21e req-40555bb4-a394-4ec8-ba3c-8b2a624a432f service nova] Lock "3d424523-b45d-4174-ac7a-08fd653e314f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.291841] env[61972]: DEBUG oslo_concurrency.lockutils [req-09a9a986-603f-4ea3-9221-1e2c7082d21e req-40555bb4-a394-4ec8-ba3c-8b2a624a432f service nova] Lock "3d424523-b45d-4174-ac7a-08fd653e314f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.292121] env[61972]: DEBUG nova.compute.manager [req-09a9a986-603f-4ea3-9221-1e2c7082d21e req-40555bb4-a394-4ec8-ba3c-8b2a624a432f service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] No waiting events found dispatching network-vif-plugged-f16ae0e8-600f-41e1-b72f-f6adfad3ec9e {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 846.292328] env[61972]: WARNING nova.compute.manager [req-09a9a986-603f-4ea3-9221-1e2c7082d21e req-40555bb4-a394-4ec8-ba3c-8b2a624a432f service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Received unexpected event network-vif-plugged-f16ae0e8-600f-41e1-b72f-f6adfad3ec9e for instance with vm_state building and task_state spawning. [ 846.295369] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Releasing lock "refresh_cache-8a9a51b5-a8a5-4bda-a36c-682758f50745" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.295570] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 846.295749] env[61972]: DEBUG nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 846.295915] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 846.311353] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.489735] env[61972]: DEBUG oslo_vmware.api [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389164, 'name': PowerOnVM_Task, 'duration_secs': 1.283948} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.490030] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.490237] env[61972]: INFO nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Took 10.00 seconds to spawn the instance on the hypervisor. 
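The PortBindingFailed traceback earlier in this run (port 440d6213-231d-4449-bc26-8cac897fd0da on instance 8a9a51b5-a8a5-4bda-a36c-682758f50745, which is then re-scheduled and its network deallocated) is the kind of event worth pulling out of a log this size before moving on to the neutron side. Below is a minimal, stdlib-only Python sketch of that triage step; it is not part of the captured log, and the helper names, the regex, and the nova-compute.log default path are illustrative assumptions about how one might scan this entry format.

#!/usr/bin/env python3
# Illustrative triage helper (not part of the log above): scan a nova-compute
# log for nova.exception.PortBindingFailed and report instance/port pairs.
import re
import sys
from collections import defaultdict

# Matches entries shaped like the ERROR lines above, e.g.
#   "... [instance: <uuid>] nova.exception.PortBindingFailed: Binding failed
#    for port <uuid>, please check neutron logs for more information."
BINDING_FAILED = re.compile(
    r"\[instance: (?P<instance>[0-9a-f-]{36})\].*?"
    r"PortBindingFailed: Binding failed for port (?P<port>[0-9a-f-]{36})"
)

def port_binding_failures(lines):
    # Yield (instance_uuid, port_uuid) for every match; finditer copes with
    # lines that carry more than one log entry, as in this capture.
    for line in lines:
        for m in BINDING_FAILED.finditer(line):
            yield m.group("instance"), m.group("port")

def main(path):
    failures = defaultdict(set)
    with open(path, encoding="utf-8", errors="replace") as fh:
        for instance, port in port_binding_failures(fh):
            failures[instance].add(port)
    for instance, ports in sorted(failures.items()):
        print(f"{instance}: binding failed for port(s) {', '.join(sorted(ports))}")

if __name__ == "__main__":
    main(sys.argv[1] if len(sys.argv) > 1 else "nova-compute.log")

Run over the full log for this env, a scan like this would list every instance/port pair that failed binding in the run, which is usually the shortest path to the matching neutron-server entries.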
[ 846.490414] env[61972]: DEBUG nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 846.491205] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a932021-322e-4887-b390-99758f3bfac2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.556943] env[61972]: DEBUG nova.scheduler.client.report [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 846.586559] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ac7987-2362-31ef-2e7f-3b737b3f3f37, 'name': SearchDatastore_Task, 'duration_secs': 0.008582} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.587317] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d804d078-823f-41fa-8f3b-4bc3865e74a8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.592722] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 846.592722] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b0a598-3ca3-b344-7dc1-15e3f37bd5db" [ 846.592722] env[61972]: _type = "Task" [ 846.592722] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.599738] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b0a598-3ca3-b344-7dc1-15e3f37bd5db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.639784] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389167, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.722380] env[61972]: DEBUG nova.network.neutron [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Successfully updated port: f16ae0e8-600f-41e1-b72f-f6adfad3ec9e {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.745781] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077339} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.746079] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 846.746911] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e10562a-ccf3-4a54-b653-5a23c01b2e08 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.769016] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 89cbc6ec-7546-443c-9abb-47940d223daa/89cbc6ec-7546-443c-9abb-47940d223daa.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 846.769342] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af7aa620-9b17-4b91-9ed3-b6ce68a6d698 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.789733] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 846.789733] env[61972]: value = "task-1389169" [ 846.789733] env[61972]: _type = "Task" [ 846.789733] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.797299] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389169, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.813892] env[61972]: DEBUG nova.network.neutron [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.008430] env[61972]: INFO nova.compute.manager [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Took 29.70 seconds to build instance. [ 847.062030] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.888s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.062458] env[61972]: ERROR nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Traceback (most recent call last): [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self.driver.spawn(context, instance, image_meta, [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] vm_ref = self.build_virtual_machine(instance, [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 847.062458] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] for vif in network_info: [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] 
File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] return self._sync_wrapper(fn, *args, **kwargs) [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self.wait() [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self[:] = self._gt.wait() [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] return self._exit_event.wait() [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] result = hub.switch() [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 847.062796] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] return self.greenlet.switch() [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] result = function(*args, **kwargs) [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] return func(*args, **kwargs) [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] raise e [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] nwinfo = self.network_api.allocate_for_instance( [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] created_port_ids = self._update_ports_for_instance( [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", 
line 1414, in _update_ports_for_instance [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] with excutils.save_and_reraise_exception(): [ 847.063126] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] self.force_reraise() [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] raise self.value [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] updated_port = self._update_port( [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] _ensure_no_port_binding_failure(port) [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] raise exception.PortBindingFailed(port_id=port['id']) [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] nova.exception.PortBindingFailed: Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. [ 847.063527] env[61972]: ERROR nova.compute.manager [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] [ 847.063860] env[61972]: DEBUG nova.compute.utils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. 
{{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 847.064525] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.971s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.065936] env[61972]: INFO nova.compute.claims [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.069336] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Build of instance 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba was re-scheduled: Binding failed for port 5242c0c7-f0be-48bd-9cdb-facff0c96198, please check neutron logs for more information. {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 847.069775] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 847.070009] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.070211] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.070372] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.103652] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b0a598-3ca3-b344-7dc1-15e3f37bd5db, 'name': SearchDatastore_Task, 'duration_secs': 0.04503} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.103908] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.104175] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] caad50a8-e0ad-4ca9-b391-691ead1756f0/caad50a8-e0ad-4ca9-b391-691ead1756f0.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.104435] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00314c0d-d6b2-4404-8329-99ad6fdd7f60 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.112016] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 847.112016] env[61972]: value = "task-1389170" [ 847.112016] env[61972]: _type = "Task" [ 847.112016] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.119804] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389170, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.139862] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389167, 'name': CloneVM_Task} progress is 94%. 
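Editor's note: the CopyVirtualDisk_Task invocation and the "Waiting for the task" / "progress is N%" polling above follow oslo.vmware's invoke_api() + wait_for_task() pattern. A sketch under the assumption that an authenticated oslo_vmware.api.VMwareAPISession and a datacenter reference already exist; this is not the exact nova.virt.vmwareapi.vm_util code:

    def copy_image_to_instance_dir(session, dc_ref, source_vmdk, dest_vmdk):
        # Ask the VirtualDiskManager to start the copy; a Task moref is returned.
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_mgr,
                                  sourceName=source_vmdk,
                                  sourceDatacenter=dc_ref,
                                  destName=dest_vmdk,
                                  destDatacenter=dc_ref)
        # wait_for_task() polls the task (the "progress is N%" lines above)
        # and raises if vCenter reports an error state.
        return session.wait_for_task(task)

Here source_vmdk would be the cached image VMDK path and dest_vmdk the per-instance path, as in the "Copying Virtual Disk [datastore1] devstack-image-cache_base/..." entry above.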
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.226070] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-3d424523-b45d-4174-ac7a-08fd653e314f" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.226237] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-3d424523-b45d-4174-ac7a-08fd653e314f" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.226524] env[61972]: DEBUG nova.network.neutron [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.303486] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.316511] env[61972]: INFO nova.compute.manager [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] [instance: 8a9a51b5-a8a5-4bda-a36c-682758f50745] Took 1.02 seconds to deallocate network for instance. [ 847.436329] env[61972]: DEBUG nova.compute.manager [req-2e20bce6-c012-4d4b-8242-d23a0741d1a4 req-f9fcf0ca-5ab4-412a-8e52-afbc151b0cf8 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Received event network-changed-f16ae0e8-600f-41e1-b72f-f6adfad3ec9e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 847.436656] env[61972]: DEBUG nova.compute.manager [req-2e20bce6-c012-4d4b-8242-d23a0741d1a4 req-f9fcf0ca-5ab4-412a-8e52-afbc151b0cf8 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Refreshing instance network info cache due to event network-changed-f16ae0e8-600f-41e1-b72f-f6adfad3ec9e. 
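Editor's note: the "Received event network-changed-f16ae0e8..." entries above are Neutron notifying Nova through the os-server-external-events API, which is what triggers the network info cache refresh. A sketch of that call with plain requests; the endpoint and token are placeholders, and in a real deployment Neutron's Nova notifier makes this call with credentials from its own configuration:

    import requests

    NOVA_URL = "http://controller:8774/v2.1"   # placeholder endpoint
    TOKEN = "<keystone-token>"                 # placeholder auth token

    payload = {
        "events": [{
            "name": "network-changed",
            "server_uuid": "3d424523-b45d-4174-ac7a-08fd653e314f",
            # "tag" carries the Neutron port ID the event refers to.
            "tag": "f16ae0e8-600f-41e1-b72f-f6adfad3ec9e",
        }]
    }
    resp = requests.post(f"{NOVA_URL}/os-server-external-events",
                         json=payload,
                         headers={"X-Auth-Token": TOKEN})
    print(resp.status_code, resp.json())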
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 847.436946] env[61972]: DEBUG oslo_concurrency.lockutils [req-2e20bce6-c012-4d4b-8242-d23a0741d1a4 req-f9fcf0ca-5ab4-412a-8e52-afbc151b0cf8 service nova] Acquiring lock "refresh_cache-3d424523-b45d-4174-ac7a-08fd653e314f" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.510850] env[61972]: DEBUG oslo_concurrency.lockutils [None req-50ac002f-c23e-48ab-ac24-3a2127115523 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.143s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.594133] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.625267] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389170, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.643023] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389167, 'name': CloneVM_Task} progress is 95%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.686747] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.767053] env[61972]: DEBUG nova.network.neutron [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.801052] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389169, 'name': ReconfigVM_Task, 'duration_secs': 0.592912} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.801365] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 89cbc6ec-7546-443c-9abb-47940d223daa/89cbc6ec-7546-443c-9abb-47940d223daa.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 847.802063] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3759ac4c-535d-4d07-9452-e80b6297ac80 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.808834] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 847.808834] env[61972]: value = "task-1389171" [ 847.808834] env[61972]: _type = "Task" [ 847.808834] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.816410] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389171, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.928440] env[61972]: DEBUG nova.network.neutron [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Updating instance_info_cache with network_info: [{"id": "f16ae0e8-600f-41e1-b72f-f6adfad3ec9e", "address": "fa:16:3e:a9:d0:b6", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16ae0e8-60", "ovs_interfaceid": "f16ae0e8-600f-41e1-b72f-f6adfad3ec9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.015128] env[61972]: DEBUG nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Starting 
instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 848.123306] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389170, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637845} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.125746] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] caad50a8-e0ad-4ca9-b391-691ead1756f0/caad50a8-e0ad-4ca9-b391-691ead1756f0.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.125967] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.126804] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8c2aed1-9e65-4853-8f67-af8c61478a20 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.132938] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 848.132938] env[61972]: value = "task-1389172" [ 848.132938] env[61972]: _type = "Task" [ 848.132938] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.147040] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389167, 'name': CloneVM_Task, 'duration_secs': 1.898725} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.150070] env[61972]: INFO nova.virt.vmwareapi.vmops [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Created linked-clone VM from snapshot [ 848.150361] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389172, 'name': ExtendVirtualDisk_Task} progress is 0%. 
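Editor's note: the network_info blobs logged above (for example the f16ae0e8... VIF on instance 3d424523...) are plain list-of-dict structures, so the operator-relevant fields can be pulled out directly. A small sketch over a trimmed copy of the entry shown in the log:

    # Trimmed copy of the VIF dict logged for instance 3d424523-...
    network_info = [{
        "id": "f16ae0e8-600f-41e1-b72f-f6adfad3ec9e",
        "address": "fa:16:3e:a9:d0:b6",
        "devname": "tapf16ae0e8-60",
        "network": {
            "label": "tempest-DeleteServersTestJSON-1093561550-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.6", "type": "fixed"}],
            }],
        },
    }]

    def summarize_vifs(network_info):
        # Yield (port_id, mac, fixed_ips, devname) for each VIF entry.
        for vif in network_info:
            ips = [ip["address"]
                   for subnet in vif["network"]["subnets"]
                   for ip in subnet["ips"]]
            yield vif["id"], vif["address"], ips, vif.get("devname")

    for port_id, mac, ips, dev in summarize_vifs(network_info):
        print(port_id, mac, ips, dev)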
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.153551] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe30528f-9d8e-4d2d-b08f-d980f780f1c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.161297] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Uploading image 484207c6-3534-46e0-becb-1716957998ac {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 848.182470] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 848.182470] env[61972]: value = "vm-294831" [ 848.182470] env[61972]: _type = "VirtualMachine" [ 848.182470] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 848.182815] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-19abea16-edd5-4139-b695-0e9ddf6e9e64 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.190774] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "refresh_cache-34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.190992] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 848.191184] env[61972]: DEBUG nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 848.191348] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.194159] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lease: (returnval){ [ 848.194159] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ce1617-d58d-dbaa-aa36-d3c4b4c341ab" [ 848.194159] env[61972]: _type = "HttpNfcLease" [ 848.194159] env[61972]: } obtained for exporting VM: (result){ [ 848.194159] env[61972]: value = "vm-294831" [ 848.194159] env[61972]: _type = "VirtualMachine" [ 848.194159] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 848.194440] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the lease: (returnval){ [ 848.194440] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ce1617-d58d-dbaa-aa36-d3c4b4c341ab" [ 848.194440] env[61972]: _type = "HttpNfcLease" [ 848.194440] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 848.202487] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 848.202487] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ce1617-d58d-dbaa-aa36-d3c4b4c341ab" [ 848.202487] env[61972]: _type = "HttpNfcLease" [ 848.202487] env[61972]: } is initializing. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 848.208886] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.313917] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62df655f-a444-4264-930e-8ffba0770c77 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.321917] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389171, 'name': Rename_Task, 'duration_secs': 0.476573} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.323737] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 848.324015] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f1230e3-4105-4144-a985-71b9b6f8deb7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.326061] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccafddd-dd22-4e03-85ee-bb42c1237caa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.364592] env[61972]: INFO nova.scheduler.client.report [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Deleted allocations for instance 8a9a51b5-a8a5-4bda-a36c-682758f50745 [ 848.372424] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05127584-cbe1-4983-8428-0dc4fb853645 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.375434] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 848.375434] env[61972]: value = "task-1389174" [ 848.375434] env[61972]: _type = "Task" [ 848.375434] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.385797] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b74bd34-fee0-47d2-8c1c-39ccb9cb443f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.393233] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389174, 'name': PowerOnVM_Task} progress is 33%. 
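Editor's note: "Deleted allocations for instance 8a9a51b5..." above is nova.scheduler.client.report removing the instance's resource allocations from the Placement service after cleanup. The underlying REST call is a single DELETE against that consumer's allocations; a sketch with placeholder endpoint and token:

    import requests

    PLACEMENT_URL = "http://controller:8778"   # placeholder endpoint
    TOKEN = "<keystone-token>"                 # placeholder admin token
    instance_uuid = "8a9a51b5-a8a5-4bda-a36c-682758f50745"

    resp = requests.delete(
        f"{PLACEMENT_URL}/allocations/{instance_uuid}",
        headers={"X-Auth-Token": TOKEN,
                 "OpenStack-API-Version": "placement 1.28"})
    print(resp.status_code)   # 204 No Content once the allocations are gone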
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.404353] env[61972]: DEBUG nova.compute.provider_tree [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.433799] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-3d424523-b45d-4174-ac7a-08fd653e314f" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.434303] env[61972]: DEBUG nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Instance network_info: |[{"id": "f16ae0e8-600f-41e1-b72f-f6adfad3ec9e", "address": "fa:16:3e:a9:d0:b6", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16ae0e8-60", "ovs_interfaceid": "f16ae0e8-600f-41e1-b72f-f6adfad3ec9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 848.434711] env[61972]: DEBUG oslo_concurrency.lockutils [req-2e20bce6-c012-4d4b-8242-d23a0741d1a4 req-f9fcf0ca-5ab4-412a-8e52-afbc151b0cf8 service nova] Acquired lock "refresh_cache-3d424523-b45d-4174-ac7a-08fd653e314f" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.434711] env[61972]: DEBUG nova.network.neutron [req-2e20bce6-c012-4d4b-8242-d23a0741d1a4 req-f9fcf0ca-5ab4-412a-8e52-afbc151b0cf8 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Refreshing network info cache for port f16ae0e8-600f-41e1-b72f-f6adfad3ec9e {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 848.435954] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:d0:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e99c063c-0cb7-4db6-b077-114166cfe889', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'f16ae0e8-600f-41e1-b72f-f6adfad3ec9e', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.445364] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Creating folder: Project (bd3c052a272742808be2bcdc71d8f62f). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.446467] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquiring lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.446623] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.446838] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquiring lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.447105] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.447296] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.448868] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4c75bed-5eb2-4089-b8fc-1cffe4006943 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.451289] env[61972]: INFO nova.compute.manager [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Terminating instance [ 848.463310] env[61972]: INFO nova.virt.vmwareapi.vm_util 
[None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Created folder: Project (bd3c052a272742808be2bcdc71d8f62f) in parent group-v294799. [ 848.463310] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Creating folder: Instances. Parent ref: group-v294832. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.463310] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a35ffe1e-951e-4e01-b84f-83c54c0fa020 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.472967] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Created folder: Instances in parent group-v294832. [ 848.473212] env[61972]: DEBUG oslo.service.loopingcall [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.473454] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 848.473593] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5cc1e76a-85f4-492f-b786-7c85b2e09a6c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.491851] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.491851] env[61972]: value = "task-1389177" [ 848.491851] env[61972]: _type = "Task" [ 848.491851] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.499431] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389177, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.535682] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.645856] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065237} completed successfully. 
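Editor's note: the oslo.service.loopingcall entry above ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return") appears to come from a retry wrapper waiting for a decorated call to finish. A minimal sketch of oslo.service's RetryDecorator, which produces that style of message; the exception type and retry counts here are illustrative assumptions:

    from oslo_service import loopingcall

    class TransientVimFault(Exception):
        """Illustrative stand-in for a retryable vCenter fault."""

    @loopingcall.RetryDecorator(max_retry_count=3,
                                inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(TransientVimFault,))
    def create_vm():
        # Retried with increasing sleeps if it raises TransientVimFault;
        # any other exception propagates immediately.
        print("submitting CreateVM_Task")

    create_vm()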
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.646312] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.646852] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67aa5159-b646-4599-b002-6857ddf854d2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.668311] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] caad50a8-e0ad-4ca9-b391-691ead1756f0/caad50a8-e0ad-4ca9-b391-691ead1756f0.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.668542] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caf6cfa6-6bbc-487f-a118-80117d1403e4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.689303] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 848.689303] env[61972]: value = "task-1389178" [ 848.689303] env[61972]: _type = "Task" [ 848.689303] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.696677] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389178, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.702241] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 848.702241] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ce1617-d58d-dbaa-aa36-d3c4b4c341ab" [ 848.702241] env[61972]: _type = "HttpNfcLease" [ 848.702241] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 848.702482] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 848.702482] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ce1617-d58d-dbaa-aa36-d3c4b4c341ab" [ 848.702482] env[61972]: _type = "HttpNfcLease" [ 848.702482] env[61972]: }. 
{{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 848.703174] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efced103-e3f6-47f7-af1c-6ae15847a081 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.710536] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3308d-f571-bb91-6536-a29111685f11/disk-0.vmdk from lease info. {{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 848.710718] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3308d-f571-bb91-6536-a29111685f11/disk-0.vmdk for reading. {{(pid=61972) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 848.712107] env[61972]: DEBUG nova.network.neutron [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.827618] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-75b846ff-7aa0-43db-bfcd-fd6bc4b614bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.882054] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c7f3bbf0-b961-43b9-af32-51d2462911f0 tempest-ServersTestManualDisk-193004154 tempest-ServersTestManualDisk-193004154-project-member] Lock "8a9a51b5-a8a5-4bda-a36c-682758f50745" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.213s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.889274] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389174, 'name': PowerOnVM_Task} progress is 66%. 
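Editor's note: the image-upload path above (ExportVm, waiting for the HttpNfcLease to become ready, then "Found VMDK URL ... from lease info") follows the standard vSphere export-lease flow. A sketch using the oslo.vmware session helpers visible in the log (wait_for_lease_ready, vim_util.get_object_property); error handling and lease progress keep-alives are omitted, and this is not the exact oslo_vmware.rw_handles code:

    from oslo_vmware import vim_util

    def export_vmdk_url(session, vm_ref):
        # Ask vCenter for an export lease on the VM (a linked clone here).
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        # Blocks until the lease leaves the "initializing" state.
        session.wait_for_lease_ready(lease)
        # The lease info lists one deviceUrl per exported disk.
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        for device_url in lease_info.deviceUrl:
            if device_url.url.endswith('.vmdk'):
                return device_url.url

The returned URL is the https://esx.../nfc/.../disk-0.vmdk endpoint that is then opened for reading, as in the "Opening URL ... for reading" entry above.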
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.907114] env[61972]: DEBUG nova.scheduler.client.report [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 848.955390] env[61972]: DEBUG nova.compute.manager [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 848.956023] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.956678] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6597bc30-87fd-4753-bfa8-097ce5d7a686 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.964409] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.964691] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18ee1930-ebfe-4d17-8609-5af96cbc14fc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.971577] env[61972]: DEBUG oslo_vmware.api [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 848.971577] env[61972]: value = "task-1389179" [ 848.971577] env[61972]: _type = "Task" [ 848.971577] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.979929] env[61972]: DEBUG oslo_vmware.api [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.005092] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389177, 'name': CreateVM_Task, 'duration_secs': 0.498616} completed successfully. 
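Editor's note: the inventory data reported above translates into schedulable capacity via the allocation ratios, roughly capacity = (total - reserved) * allocation_ratio per resource class, with max_unit still capping any single request (16 VCPU or 175 DISK_GB per instance here). Worked out for the values in the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: ~{capacity:g} schedulable units")
    # VCPU: ~192, MEMORY_MB: ~196078, DISK_GB: ~400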
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.005092] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 849.005675] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.005839] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.006417] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 849.006682] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d89d7d43-f93a-4b0b-b305-304436e1d49a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.014322] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 849.014322] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a48c97-69a0-9311-9729-207b73dc3d4b" [ 849.014322] env[61972]: _type = "Task" [ 849.014322] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.024204] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a48c97-69a0-9311-9729-207b73dc3d4b, 'name': SearchDatastore_Task, 'duration_secs': 0.009437} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.026543] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.026830] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.027019] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.027173] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.027364] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.027807] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dc60bef-0e34-43da-90fe-0cef2aa41552 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.035982] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.036205] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.036933] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bb93e1d-b1b7-43f4-a84d-64dd3ecb352c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.047423] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 849.047423] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e4cd15-d924-8576-7b24-cbb8037c9b32" [ 849.047423] env[61972]: _type = "Task" [ 849.047423] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.063827] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e4cd15-d924-8576-7b24-cbb8037c9b32, 'name': SearchDatastore_Task, 'duration_secs': 0.009455} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.064806] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-507b03ec-a505-4ce8-b62a-d485e8982d05 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.071154] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 849.071154] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]524afe96-5e76-c2bf-ec19-06795f0070e2" [ 849.071154] env[61972]: _type = "Task" [ 849.071154] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.082983] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]524afe96-5e76-c2bf-ec19-06795f0070e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.200199] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389178, 'name': ReconfigVM_Task, 'duration_secs': 0.446764} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.200607] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Reconfigured VM instance instance-00000044 to attach disk [datastore1] caad50a8-e0ad-4ca9-b391-691ead1756f0/caad50a8-e0ad-4ca9-b391-691ead1756f0.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.201308] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efb18029-9059-4e35-8aca-99468ee6e4d3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.210788] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 849.210788] env[61972]: value = "task-1389180" [ 849.210788] env[61972]: _type = "Task" [ 849.210788] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.214674] env[61972]: INFO nova.compute.manager [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba] Took 1.02 seconds to deallocate network for instance. [ 849.222879] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389180, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.328461] env[61972]: DEBUG nova.network.neutron [req-2e20bce6-c012-4d4b-8242-d23a0741d1a4 req-f9fcf0ca-5ab4-412a-8e52-afbc151b0cf8 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Updated VIF entry in instance network info cache for port f16ae0e8-600f-41e1-b72f-f6adfad3ec9e. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 849.329094] env[61972]: DEBUG nova.network.neutron [req-2e20bce6-c012-4d4b-8242-d23a0741d1a4 req-f9fcf0ca-5ab4-412a-8e52-afbc151b0cf8 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Updating instance_info_cache with network_info: [{"id": "f16ae0e8-600f-41e1-b72f-f6adfad3ec9e", "address": "fa:16:3e:a9:d0:b6", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16ae0e8-60", "ovs_interfaceid": "f16ae0e8-600f-41e1-b72f-f6adfad3ec9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.387949] env[61972]: DEBUG nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 849.390950] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389174, 'name': PowerOnVM_Task, 'duration_secs': 0.772998} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.391246] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 849.391471] env[61972]: INFO nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Took 8.73 seconds to spawn the instance on the hypervisor. 
[ 849.391698] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 849.392584] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fd7c15-a403-4925-9606-7d41d9d4a509 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.412366] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.412927] env[61972]: DEBUG nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 849.416373] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.419s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.417932] env[61972]: INFO nova.compute.claims [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.481929] env[61972]: DEBUG oslo_vmware.api [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389179, 'name': PowerOffVM_Task, 'duration_secs': 0.202573} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.482316] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.482594] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.482933] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9dedcc38-f6a1-4771-80b6-36c511392607 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.544930] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 849.545484] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 849.545970] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Deleting the datastore file [datastore2] 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.546486] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9671d1f9-d173-4a3b-b781-74e675fa2a0b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.553711] env[61972]: DEBUG oslo_vmware.api [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for the task: (returnval){ [ 849.553711] env[61972]: value = "task-1389182" [ 849.553711] env[61972]: _type = "Task" [ 849.553711] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.562098] env[61972]: DEBUG oslo_vmware.api [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389182, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.584635] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]524afe96-5e76-c2bf-ec19-06795f0070e2, 'name': SearchDatastore_Task, 'duration_secs': 0.011057} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.585276] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.585798] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 3d424523-b45d-4174-ac7a-08fd653e314f/3d424523-b45d-4174-ac7a-08fd653e314f.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.586742] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84876135-02c4-4017-853e-efce27227afb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.593161] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 849.593161] env[61972]: value = "task-1389183" [ 849.593161] env[61972]: _type = "Task" [ 849.593161] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.601800] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.726016] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389180, 'name': Rename_Task, 'duration_secs': 0.203206} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.727341] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.727933] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53436500-fa1b-4ea3-8353-099d54ecf396 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.735021] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 849.735021] env[61972]: value = "task-1389184" [ 849.735021] env[61972]: _type = "Task" [ 849.735021] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.745659] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.832659] env[61972]: DEBUG oslo_concurrency.lockutils [req-2e20bce6-c012-4d4b-8242-d23a0741d1a4 req-f9fcf0ca-5ab4-412a-8e52-afbc151b0cf8 service nova] Releasing lock "refresh_cache-3d424523-b45d-4174-ac7a-08fd653e314f" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.910525] env[61972]: INFO nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Took 28.15 seconds to build instance. [ 849.917285] env[61972]: DEBUG nova.compute.utils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.919618] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.920171] env[61972]: DEBUG nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 849.920378] env[61972]: DEBUG nova.network.neutron [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 850.037945] env[61972]: DEBUG nova.policy [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8bcceda3ff418c8188eaa0f7a066ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27839bf8968b48f2afff038b1b612f73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 850.065735] env[61972]: DEBUG oslo_vmware.api [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Task: {'id': task-1389182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183718} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.066027] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.066373] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 850.066490] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.066592] env[61972]: INFO nova.compute.manager [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 850.066809] env[61972]: DEBUG oslo.service.loopingcall [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.067029] env[61972]: DEBUG nova.compute.manager [-] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 850.067128] env[61972]: DEBUG nova.network.neutron [-] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.107410] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389183, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.246468] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389184, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.259114] env[61972]: INFO nova.scheduler.client.report [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleted allocations for instance 34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba [ 850.412606] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "89cbc6ec-7546-443c-9abb-47940d223daa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.005s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.425586] env[61972]: DEBUG nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 850.481616] env[61972]: DEBUG nova.compute.manager [req-0c571554-5255-4feb-84f2-2341b34c5445 req-a9c04a59-1f80-48d1-b25c-3cf0f8421a45 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Received event network-vif-deleted-bd88da82-32a8-4336-bc68-1faad1ca062a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 850.482329] env[61972]: INFO nova.compute.manager [req-0c571554-5255-4feb-84f2-2341b34c5445 req-a9c04a59-1f80-48d1-b25c-3cf0f8421a45 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Neutron deleted interface bd88da82-32a8-4336-bc68-1faad1ca062a; detaching it from the instance and deleting it from the info cache [ 850.482713] env[61972]: DEBUG nova.network.neutron [req-0c571554-5255-4feb-84f2-2341b34c5445 req-a9c04a59-1f80-48d1-b25c-3cf0f8421a45 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.607380] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535203} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.607380] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 3d424523-b45d-4174-ac7a-08fd653e314f/3d424523-b45d-4174-ac7a-08fd653e314f.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.607622] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.610999] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2f07602-5ed7-48c0-a4fd-f1e73f50b300 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.620731] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 850.620731] env[61972]: value = "task-1389185" [ 850.620731] env[61972]: _type = "Task" [ 850.620731] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.635392] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389185, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.706433] env[61972]: DEBUG nova.network.neutron [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Successfully created port: 515acb28-dc72-4bb7-804b-4fb5de167f99 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.713820] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1eaab5-50f9-444a-a02c-74453537e686 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.722867] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94eaec41-a85e-4ddf-8e64-f484207eb7df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.759739] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd80e769-dab9-4b83-b6d0-c488e02ea602 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.768088] env[61972]: DEBUG oslo_concurrency.lockutils [None req-416edb8e-0753-47ad-a812-f27dfd53a9ae tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "34a52fe9-a3e4-4cda-85b6-9dd106bbf7ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.647s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.773025] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8252e254-cd2d-4115-904d-4944ff4f8fa5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.777952] env[61972]: DEBUG oslo_vmware.api [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389184, 'name': PowerOnVM_Task, 'duration_secs': 0.822715} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.778620] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.778879] env[61972]: INFO nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Took 7.76 seconds to spawn the instance on the hypervisor. 
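(Reading aid, not part of the log.) The repeated "Acquiring lock / acquired :: waited / released :: held" entries around here come from oslo.concurrency's lockutils, which Nova uses to serialize work such as resource claims on "compute_resources". A minimal sketch of that pattern follows, assuming oslo.concurrency is installed; the function and its body are made up for illustration and are not Nova code.

```python
# Illustrative sketch: a named critical section via oslo.concurrency.
# When debug logging is enabled, lockutils records how long callers waited
# for the lock and how long it was held, producing lines like those above.

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid, vcpus, memory_mb):
    # Runs with the "compute_resources" lock held; concurrent callers queue.
    return {"instance": instance_uuid, "vcpus": vcpus, "memory_mb": memory_mb}


if __name__ == "__main__":
    print(claim_resources("94bd64b9-3d20-4631-baed-4500f9beb9c2", 1, 192))
```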
[ 850.779156] env[61972]: DEBUG nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 850.780450] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ceef0f-1a42-400b-be8e-b9b26464b695 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.797762] env[61972]: DEBUG nova.compute.provider_tree [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.879986] env[61972]: DEBUG nova.network.neutron [-] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.915896] env[61972]: DEBUG nova.compute.manager [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 850.991193] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28ea9e1e-bd7a-4cb5-b20c-7b9778d57a79 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.998987] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef329de-f081-4cee-9e2f-dc3a116bf177 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.028369] env[61972]: DEBUG nova.compute.manager [req-0c571554-5255-4feb-84f2-2341b34c5445 req-a9c04a59-1f80-48d1-b25c-3cf0f8421a45 service nova] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Detach interface failed, port_id=bd88da82-32a8-4336-bc68-1faad1ca062a, reason: Instance 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 851.130884] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074878} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.131897] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.132040] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9573eab-651e-4966-9cbb-a65f1f42a864 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.155659] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 3d424523-b45d-4174-ac7a-08fd653e314f/3d424523-b45d-4174-ac7a-08fd653e314f.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.155808] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-704d65de-50ce-46b1-b1d4-f3fcf75df0e7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.176248] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 851.176248] env[61972]: value = "task-1389186" [ 851.176248] env[61972]: _type = "Task" [ 851.176248] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.186264] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389186, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.279631] env[61972]: DEBUG nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 851.304268] env[61972]: DEBUG nova.scheduler.client.report [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 851.327606] env[61972]: INFO nova.compute.manager [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Took 27.82 seconds to build instance. [ 851.383831] env[61972]: INFO nova.compute.manager [-] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Took 1.32 seconds to deallocate network for instance. [ 851.436436] env[61972]: DEBUG nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 851.442721] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.462404] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.462686] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.462856] env[61972]: DEBUG nova.virt.hardware [None 
req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.463057] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 851.463213] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.463374] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.463622] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.463818] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 851.464043] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.464241] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.464434] env[61972]: DEBUG nova.virt.hardware [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.465551] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271b0cd4-baca-4077-b18a-4f310209b9f3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.474425] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98d9a32-f933-463a-a4a4-e587a4f2057a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
851.689286] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389186, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.804423] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.816501] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.817102] env[61972]: DEBUG nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 851.821010] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.679s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.822959] env[61972]: INFO nova.compute.claims [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.831518] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e55822e5-df3f-41f8-abab-f21b0242a8d9 tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "caad50a8-e0ad-4ca9-b391-691ead1756f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.397s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.890823] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.192154] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389186, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.335722] env[61972]: DEBUG nova.compute.utils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 852.340501] env[61972]: DEBUG nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 852.342596] env[61972]: DEBUG nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 852.344101] env[61972]: DEBUG nova.network.neutron [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.453259] env[61972]: DEBUG nova.policy [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f8bcceda3ff418c8188eaa0f7a066ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27839bf8968b48f2afff038b1b612f73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 852.592633] env[61972]: DEBUG nova.compute.manager [req-45abb6e1-dda5-46a6-be32-db8ff42ea7bd req-ca68cbfb-0399-4e50-87c7-daa26d98309a service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Received event network-vif-plugged-515acb28-dc72-4bb7-804b-4fb5de167f99 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 852.592884] env[61972]: DEBUG oslo_concurrency.lockutils [req-45abb6e1-dda5-46a6-be32-db8ff42ea7bd req-ca68cbfb-0399-4e50-87c7-daa26d98309a service nova] Acquiring lock "e2b6dd4e-b639-4553-a45f-87c155506ea3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.593037] env[61972]: DEBUG oslo_concurrency.lockutils [req-45abb6e1-dda5-46a6-be32-db8ff42ea7bd req-ca68cbfb-0399-4e50-87c7-daa26d98309a service nova] Lock "e2b6dd4e-b639-4553-a45f-87c155506ea3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.593250] env[61972]: DEBUG oslo_concurrency.lockutils [req-45abb6e1-dda5-46a6-be32-db8ff42ea7bd 
req-ca68cbfb-0399-4e50-87c7-daa26d98309a service nova] Lock "e2b6dd4e-b639-4553-a45f-87c155506ea3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.593382] env[61972]: DEBUG nova.compute.manager [req-45abb6e1-dda5-46a6-be32-db8ff42ea7bd req-ca68cbfb-0399-4e50-87c7-daa26d98309a service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] No waiting events found dispatching network-vif-plugged-515acb28-dc72-4bb7-804b-4fb5de167f99 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 852.593559] env[61972]: WARNING nova.compute.manager [req-45abb6e1-dda5-46a6-be32-db8ff42ea7bd req-ca68cbfb-0399-4e50-87c7-daa26d98309a service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Received unexpected event network-vif-plugged-515acb28-dc72-4bb7-804b-4fb5de167f99 for instance with vm_state building and task_state spawning. [ 852.616345] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "89cbc6ec-7546-443c-9abb-47940d223daa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.616615] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "89cbc6ec-7546-443c-9abb-47940d223daa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.616827] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "89cbc6ec-7546-443c-9abb-47940d223daa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.617017] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "89cbc6ec-7546-443c-9abb-47940d223daa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.617648] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "89cbc6ec-7546-443c-9abb-47940d223daa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.620135] env[61972]: INFO nova.compute.manager [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 
89cbc6ec-7546-443c-9abb-47940d223daa] Terminating instance [ 852.666588] env[61972]: DEBUG nova.network.neutron [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Successfully updated port: 515acb28-dc72-4bb7-804b-4fb5de167f99 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.691243] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389186, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.699757] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "caad50a8-e0ad-4ca9-b391-691ead1756f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.700146] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "caad50a8-e0ad-4ca9-b391-691ead1756f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.700701] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "caad50a8-e0ad-4ca9-b391-691ead1756f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.700981] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "caad50a8-e0ad-4ca9-b391-691ead1756f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.701243] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "caad50a8-e0ad-4ca9-b391-691ead1756f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.703404] env[61972]: INFO nova.compute.manager [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Terminating instance [ 852.845504] env[61972]: DEBUG nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 
tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 852.871812] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.898834] env[61972]: DEBUG nova.network.neutron [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Successfully created port: c8a279aa-ff18-41b6-9384-5364aea002e9 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.099441] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821ce7c0-c33f-44ea-be70-78e03fbc96df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.108288] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffba03fa-127c-4d18-9963-2e6ab513cfbc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.151749] env[61972]: DEBUG nova.compute.manager [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 853.152080] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 853.153325] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5219f06-2851-4615-9e91-bdc682158998 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.158496] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54360196-57f1-4ccd-b79c-744f6d3d247c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.172669] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.172669] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquired lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.172669] env[61972]: DEBUG nova.network.neutron [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.173155] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 853.175989] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4be43aba-954b-4ae7-aa0d-0d3329c1904d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.181212] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4f4e9f-800b-40a0-b367-5d150b910eaf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.191654] env[61972]: DEBUG oslo_vmware.api [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 853.191654] env[61972]: value = "task-1389187" [ 853.191654] env[61972]: _type = "Task" [ 853.191654] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.207959] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389186, 'name': ReconfigVM_Task, 'duration_secs': 1.805802} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.208628] env[61972]: DEBUG nova.compute.manager [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 853.208873] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 853.209415] env[61972]: DEBUG nova.compute.provider_tree [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.214518] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 3d424523-b45d-4174-ac7a-08fd653e314f/3d424523-b45d-4174-ac7a-08fd653e314f.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.215748] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebf0d71-3976-44a1-a1ab-e0d0c437a9a8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.219339] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6248b6fb-08ac-499a-b639-a6bd1d1cfd6f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.226740] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 853.230757] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c1cfdb6-4872-4f36-8afc-1beb3de258fc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.235514] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 853.235514] env[61972]: value = 
"task-1389188" [ 853.235514] env[61972]: _type = "Task" [ 853.235514] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.235514] env[61972]: DEBUG oslo_vmware.api [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389187, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.238924] env[61972]: DEBUG oslo_vmware.api [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 853.238924] env[61972]: value = "task-1389189" [ 853.238924] env[61972]: _type = "Task" [ 853.238924] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.246872] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389188, 'name': Rename_Task} progress is 10%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.252344] env[61972]: DEBUG oslo_vmware.api [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389189, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.718554] env[61972]: DEBUG oslo_vmware.api [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389187, 'name': PowerOffVM_Task, 'duration_secs': 0.204621} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.718843] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 853.719022] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 853.719851] env[61972]: DEBUG nova.scheduler.client.report [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 853.722818] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a3c20ff-8db2-4d7e-918d-ba9dc1df42b3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.737727] env[61972]: DEBUG nova.network.neutron [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.747086] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389188, 'name': Rename_Task, 'duration_secs': 0.178901} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.747573] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.747826] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71906c3e-af65-49f7-af47-db46236bd3dd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.752551] env[61972]: DEBUG oslo_vmware.api [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389189, 'name': PowerOffVM_Task, 'duration_secs': 0.214148} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.753145] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 853.753323] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 853.753561] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d06e111f-18ab-4b7a-8647-b481018f9eac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.757065] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 853.757065] env[61972]: value = "task-1389191" [ 853.757065] env[61972]: _type = "Task" [ 853.757065] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.765348] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389191, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.795213] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 853.795213] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 853.795213] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Deleting the datastore file [datastore2] 89cbc6ec-7546-443c-9abb-47940d223daa {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 853.795213] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04576284-cf72-4cd1-8804-b0baa09f1ecc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.799960] env[61972]: DEBUG oslo_vmware.api [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 853.799960] env[61972]: value = "task-1389193" [ 853.799960] env[61972]: _type = "Task" [ 853.799960] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.810525] env[61972]: DEBUG oslo_vmware.api [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389193, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.826279] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 853.826513] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 853.826695] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Deleting the datastore file [datastore1] caad50a8-e0ad-4ca9-b391-691ead1756f0 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 853.826973] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af59f274-3a8b-479b-8f60-1e54ef7bd70b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.833617] env[61972]: DEBUG oslo_vmware.api [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for the task: (returnval){ [ 853.833617] env[61972]: value = "task-1389194" [ 853.833617] env[61972]: _type = "Task" [ 853.833617] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.841613] env[61972]: DEBUG oslo_vmware.api [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389194, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.859136] env[61972]: DEBUG nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 853.897273] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.897549] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.897701] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.897881] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.898042] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.898265] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.898512] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.898720] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.898861] env[61972]: DEBUG 
nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.899027] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.899204] env[61972]: DEBUG nova.virt.hardware [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.900105] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c191b4-7040-47a4-a60c-fb7cbfa3adff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.908684] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cfd31b-8709-472b-8b76-073931f256d6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.975285] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.975526] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.004414] env[61972]: DEBUG nova.network.neutron [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Updating instance_info_cache with network_info: [{"id": "515acb28-dc72-4bb7-804b-4fb5de167f99", "address": "fa:16:3e:b1:34:de", "network": {"id": "3a3bad14-0acb-42a3-8bd9-5b6ea2eca868", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d3f24b94cb854f4b925e1be405c7df82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"d7836a5b-a91e-4d3f-8e96-afe024f62bb5", "external-id": "nsx-vlan-transportzone-419", "segmentation_id": 419, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap515acb28-dc", "ovs_interfaceid": "515acb28-dc72-4bb7-804b-4fb5de167f99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.226069] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.226549] env[61972]: DEBUG nova.compute.manager [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 854.229046] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.991s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.229235] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.229391] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 854.230032] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.694s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.231195] env[61972]: INFO nova.compute.claims [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.234514] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676ab667-a8d6-4101-a3e5-bd6437de94ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.243471] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b026e21c-7f09-41e8-b6ab-c9651d173eeb {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.263440] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a446ab-860d-498e-b6d4-8aab452b4a3d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.272686] env[61972]: DEBUG oslo_vmware.api [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389191, 'name': PowerOnVM_Task, 'duration_secs': 0.500287} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.274707] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 854.274925] env[61972]: INFO nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Took 8.93 seconds to spawn the instance on the hypervisor. [ 854.275124] env[61972]: DEBUG nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 854.276188] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ed7eaf-24e1-4edb-a7fe-74efb7b7e811 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.279327] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041acc12-655f-45ee-8443-f0e042e8f658 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.313953] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181408MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 854.314129] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.323186] env[61972]: DEBUG oslo_vmware.api [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389193, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220629} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.323448] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.323657] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 854.323853] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 854.324046] env[61972]: INFO nova.compute.manager [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Took 1.17 seconds to destroy the instance on the hypervisor. [ 854.324302] env[61972]: DEBUG oslo.service.loopingcall [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.324498] env[61972]: DEBUG nova.compute.manager [-] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 854.324598] env[61972]: DEBUG nova.network.neutron [-] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 854.343243] env[61972]: DEBUG oslo_vmware.api [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Task: {'id': task-1389194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191208} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.343829] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.344094] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 854.344214] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 854.344385] env[61972]: INFO nova.compute.manager [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 854.344626] env[61972]: DEBUG oslo.service.loopingcall [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.344831] env[61972]: DEBUG nova.compute.manager [-] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 854.344952] env[61972]: DEBUG nova.network.neutron [-] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 854.507467] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Releasing lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.507854] env[61972]: DEBUG nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Instance network_info: |[{"id": "515acb28-dc72-4bb7-804b-4fb5de167f99", "address": "fa:16:3e:b1:34:de", "network": {"id": "3a3bad14-0acb-42a3-8bd9-5b6ea2eca868", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d3f24b94cb854f4b925e1be405c7df82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7836a5b-a91e-4d3f-8e96-afe024f62bb5", "external-id": "nsx-vlan-transportzone-419", "segmentation_id": 419, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap515acb28-dc", "ovs_interfaceid": "515acb28-dc72-4bb7-804b-4fb5de167f99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 854.508338] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:34:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7836a5b-a91e-4d3f-8e96-afe024f62bb5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '515acb28-dc72-4bb7-804b-4fb5de167f99', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.515848] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Creating folder: Project (27839bf8968b48f2afff038b1b612f73). Parent ref: group-v294799. 
{{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 854.516186] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5e73e5c-4882-4edc-97a2-203a440c9c59 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.527371] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Created folder: Project (27839bf8968b48f2afff038b1b612f73) in parent group-v294799. [ 854.527568] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Creating folder: Instances. Parent ref: group-v294835. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 854.527822] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8f70032-3baf-434e-997d-138a93c175c3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.537776] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Created folder: Instances in parent group-v294835. [ 854.538063] env[61972]: DEBUG oslo.service.loopingcall [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.538273] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.538489] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb05ada6-8fa4-40d2-8a01-0d2d1dd75da1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.557718] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.557718] env[61972]: value = "task-1389197" [ 854.557718] env[61972]: _type = "Task" [ 854.557718] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.571811] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389197, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.595122] env[61972]: DEBUG nova.compute.manager [req-da1d64c1-910b-4ef7-a8a5-9a7e36093e79 req-6cf652e4-bd9c-467b-b733-20073da22252 service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Received event network-vif-plugged-c8a279aa-ff18-41b6-9384-5364aea002e9 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 854.595122] env[61972]: DEBUG oslo_concurrency.lockutils [req-da1d64c1-910b-4ef7-a8a5-9a7e36093e79 req-6cf652e4-bd9c-467b-b733-20073da22252 service nova] Acquiring lock "94bd64b9-3d20-4631-baed-4500f9beb9c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.595334] env[61972]: DEBUG oslo_concurrency.lockutils [req-da1d64c1-910b-4ef7-a8a5-9a7e36093e79 req-6cf652e4-bd9c-467b-b733-20073da22252 service nova] Lock "94bd64b9-3d20-4631-baed-4500f9beb9c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.595412] env[61972]: DEBUG oslo_concurrency.lockutils [req-da1d64c1-910b-4ef7-a8a5-9a7e36093e79 req-6cf652e4-bd9c-467b-b733-20073da22252 service nova] Lock "94bd64b9-3d20-4631-baed-4500f9beb9c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.595581] env[61972]: DEBUG nova.compute.manager [req-da1d64c1-910b-4ef7-a8a5-9a7e36093e79 req-6cf652e4-bd9c-467b-b733-20073da22252 service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] No waiting events found dispatching network-vif-plugged-c8a279aa-ff18-41b6-9384-5364aea002e9 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 854.595755] env[61972]: WARNING nova.compute.manager [req-da1d64c1-910b-4ef7-a8a5-9a7e36093e79 req-6cf652e4-bd9c-467b-b733-20073da22252 service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Received unexpected event network-vif-plugged-c8a279aa-ff18-41b6-9384-5364aea002e9 for instance with vm_state building and task_state spawning. [ 854.635538] env[61972]: DEBUG nova.compute.manager [req-fdd5bd3c-e133-417d-a79e-e6cbeffe1546 req-e65a6616-0fc8-40a1-82d9-9aaa10ea4951 service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Received event network-changed-515acb28-dc72-4bb7-804b-4fb5de167f99 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 854.635763] env[61972]: DEBUG nova.compute.manager [req-fdd5bd3c-e133-417d-a79e-e6cbeffe1546 req-e65a6616-0fc8-40a1-82d9-9aaa10ea4951 service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Refreshing instance network info cache due to event network-changed-515acb28-dc72-4bb7-804b-4fb5de167f99. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 854.639017] env[61972]: DEBUG oslo_concurrency.lockutils [req-fdd5bd3c-e133-417d-a79e-e6cbeffe1546 req-e65a6616-0fc8-40a1-82d9-9aaa10ea4951 service nova] Acquiring lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.639017] env[61972]: DEBUG oslo_concurrency.lockutils [req-fdd5bd3c-e133-417d-a79e-e6cbeffe1546 req-e65a6616-0fc8-40a1-82d9-9aaa10ea4951 service nova] Acquired lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.639017] env[61972]: DEBUG nova.network.neutron [req-fdd5bd3c-e133-417d-a79e-e6cbeffe1546 req-e65a6616-0fc8-40a1-82d9-9aaa10ea4951 service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Refreshing network info cache for port 515acb28-dc72-4bb7-804b-4fb5de167f99 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.735402] env[61972]: DEBUG nova.compute.utils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.736824] env[61972]: DEBUG nova.compute.manager [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Not allocating networking since 'none' was specified. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 854.793462] env[61972]: DEBUG nova.network.neutron [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Successfully updated port: c8a279aa-ff18-41b6-9384-5364aea002e9 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.827502] env[61972]: INFO nova.compute.manager [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Took 29.50 seconds to build instance. [ 855.067837] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389197, 'name': CreateVM_Task, 'duration_secs': 0.409508} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.068175] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.068889] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.069130] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.069546] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 855.069850] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-080a8d74-3e8c-45b0-a259-9d3d88f11336 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.074794] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 855.074794] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c9d993-bfbf-cf5d-0a63-0475b2a214db" [ 855.074794] env[61972]: _type = "Task" [ 855.074794] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.082983] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c9d993-bfbf-cf5d-0a63-0475b2a214db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.114549] env[61972]: DEBUG nova.network.neutron [-] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.238208] env[61972]: DEBUG nova.compute.manager [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 855.297761] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "refresh_cache-94bd64b9-3d20-4631-baed-4500f9beb9c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.297941] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquired lock "refresh_cache-94bd64b9-3d20-4631-baed-4500f9beb9c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.298146] env[61972]: DEBUG nova.network.neutron [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.330148] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55faf35c-9f56-4adc-ad57-3ea5753268d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.974s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.356262] env[61972]: DEBUG nova.network.neutron [-] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.381213] env[61972]: DEBUG nova.network.neutron [req-fdd5bd3c-e133-417d-a79e-e6cbeffe1546 req-e65a6616-0fc8-40a1-82d9-9aaa10ea4951 service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Updated VIF entry in instance network info cache for port 515acb28-dc72-4bb7-804b-4fb5de167f99. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.381809] env[61972]: DEBUG nova.network.neutron [req-fdd5bd3c-e133-417d-a79e-e6cbeffe1546 req-e65a6616-0fc8-40a1-82d9-9aaa10ea4951 service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Updating instance_info_cache with network_info: [{"id": "515acb28-dc72-4bb7-804b-4fb5de167f99", "address": "fa:16:3e:b1:34:de", "network": {"id": "3a3bad14-0acb-42a3-8bd9-5b6ea2eca868", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d3f24b94cb854f4b925e1be405c7df82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7836a5b-a91e-4d3f-8e96-afe024f62bb5", "external-id": "nsx-vlan-transportzone-419", "segmentation_id": 419, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap515acb28-dc", "ovs_interfaceid": "515acb28-dc72-4bb7-804b-4fb5de167f99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.491700] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ea1103-701a-4a61-b6d6-a5d1cde8fb22 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.499291] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816ce102-b800-4a04-b121-940ca995a786 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.531382] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45aa6839-f92c-4577-8144-a8509b37fba5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.539389] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef267a26-7676-40dd-95d8-3294713b8091 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.553248] env[61972]: DEBUG nova.compute.provider_tree [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.585319] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c9d993-bfbf-cf5d-0a63-0475b2a214db, 'name': SearchDatastore_Task, 'duration_secs': 0.01643} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.585588] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.585840] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.586090] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.586246] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.586431] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.586696] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73a64f64-ab60-4b51-bacd-45d21096cc51 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.596227] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.596461] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.597212] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09568452-efcf-4a49-9ccf-ec6791225611 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.602563] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 855.602563] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]521efea1-123a-34b3-5325-bfbcf9b0a0c4" [ 855.602563] env[61972]: _type = "Task" [ 855.602563] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.610774] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]521efea1-123a-34b3-5325-bfbcf9b0a0c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.617449] env[61972]: INFO nova.compute.manager [-] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Took 1.29 seconds to deallocate network for instance. [ 855.831690] env[61972]: DEBUG nova.network.neutron [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.833887] env[61972]: DEBUG nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 855.859096] env[61972]: INFO nova.compute.manager [-] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Took 1.51 seconds to deallocate network for instance. 
[ 855.887328] env[61972]: DEBUG oslo_concurrency.lockutils [req-fdd5bd3c-e133-417d-a79e-e6cbeffe1546 req-e65a6616-0fc8-40a1-82d9-9aaa10ea4951 service nova] Releasing lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.985878] env[61972]: DEBUG nova.network.neutron [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Updating instance_info_cache with network_info: [{"id": "c8a279aa-ff18-41b6-9384-5364aea002e9", "address": "fa:16:3e:65:fb:cb", "network": {"id": "3a3bad14-0acb-42a3-8bd9-5b6ea2eca868", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d3f24b94cb854f4b925e1be405c7df82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7836a5b-a91e-4d3f-8e96-afe024f62bb5", "external-id": "nsx-vlan-transportzone-419", "segmentation_id": 419, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a279aa-ff", "ovs_interfaceid": "c8a279aa-ff18-41b6-9384-5364aea002e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.058069] env[61972]: DEBUG nova.scheduler.client.report [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 856.113923] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]521efea1-123a-34b3-5325-bfbcf9b0a0c4, 'name': SearchDatastore_Task, 'duration_secs': 0.014577} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.114793] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29309d16-01cc-4a1b-9dfb-fcefdec67120 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.120278] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 856.120278] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5242e1e7-554a-85eb-3877-bccec34a44f0" [ 856.120278] env[61972]: _type = "Task" [ 856.120278] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.124418] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.129413] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5242e1e7-554a-85eb-3877-bccec34a44f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.252938] env[61972]: DEBUG nova.compute.manager [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 856.274184] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 856.274455] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 856.274614] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.274797] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 856.274963] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.275163] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 856.275383] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 856.275551] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 856.275721] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 
tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 856.275913] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 856.276078] env[61972]: DEBUG nova.virt.hardware [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.276940] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c36b37-93c2-4c55-b5c2-ff8a36daa730 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.286607] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b93752-c62b-4842-b878-93bc42291336 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.299753] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.305293] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Creating folder: Project (5184d6320f674c23841016c3c599ac7f). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.305586] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d91cfee-29bf-4223-aec3-008458ee46e1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.314796] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Created folder: Project (5184d6320f674c23841016c3c599ac7f) in parent group-v294799. [ 856.315020] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Creating folder: Instances. Parent ref: group-v294838. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.315257] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aef5fa6a-748b-4621-a4ab-ff61945bbebe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.323142] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Created folder: Instances in parent group-v294838. 
[ 856.323367] env[61972]: DEBUG oslo.service.loopingcall [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.323556] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.323785] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19c0ab2d-28a4-4e84-8487-cba81d11101a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.341260] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.341260] env[61972]: value = "task-1389200" [ 856.341260] env[61972]: _type = "Task" [ 856.341260] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.353087] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389200, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.355581] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.366836] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.416026] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3308d-f571-bb91-6536-a29111685f11/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 856.417048] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c02b66-daad-44ee-88a3-7e472bc2ef36 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.423291] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3308d-f571-bb91-6536-a29111685f11/disk-0.vmdk is in state: ready. 
{{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 856.423477] env[61972]: ERROR oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3308d-f571-bb91-6536-a29111685f11/disk-0.vmdk due to incomplete transfer. [ 856.423710] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f8b90a51-3825-4c6c-8701-d9344f0b3f0d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.429955] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d3308d-f571-bb91-6536-a29111685f11/disk-0.vmdk. {{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 856.430171] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Uploaded image 484207c6-3534-46e0-becb-1716957998ac to the Glance image server {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 856.432368] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Destroying the VM {{(pid=61972) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 856.432629] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5a08007b-9e34-4cf3-87b9-c7cdfbffa71d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.437710] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 856.437710] env[61972]: value = "task-1389201" [ 856.437710] env[61972]: _type = "Task" [ 856.437710] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.444931] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389201, 'name': Destroy_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.488764] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Releasing lock "refresh_cache-94bd64b9-3d20-4631-baed-4500f9beb9c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.489254] env[61972]: DEBUG nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Instance network_info: |[{"id": "c8a279aa-ff18-41b6-9384-5364aea002e9", "address": "fa:16:3e:65:fb:cb", "network": {"id": "3a3bad14-0acb-42a3-8bd9-5b6ea2eca868", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d3f24b94cb854f4b925e1be405c7df82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7836a5b-a91e-4d3f-8e96-afe024f62bb5", "external-id": "nsx-vlan-transportzone-419", "segmentation_id": 419, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a279aa-ff", "ovs_interfaceid": "c8a279aa-ff18-41b6-9384-5364aea002e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 856.489787] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:fb:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7836a5b-a91e-4d3f-8e96-afe024f62bb5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8a279aa-ff18-41b6-9384-5364aea002e9', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.498656] env[61972]: DEBUG oslo.service.loopingcall [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.499143] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.499500] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1dd1554d-44b1-41f1-b3ca-483716d95cfe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.525931] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.525931] env[61972]: value = "task-1389202" [ 856.525931] env[61972]: _type = "Task" [ 856.525931] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.535977] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389202, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.562139] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.562733] env[61972]: DEBUG nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 856.565482] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.646s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.566857] env[61972]: INFO nova.compute.claims [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.627100] env[61972]: DEBUG nova.compute.manager [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Received event network-changed-c8a279aa-ff18-41b6-9384-5364aea002e9 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 856.627309] env[61972]: DEBUG nova.compute.manager [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Refreshing instance network info cache due to event network-changed-c8a279aa-ff18-41b6-9384-5364aea002e9. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 856.627534] env[61972]: DEBUG oslo_concurrency.lockutils [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] Acquiring lock "refresh_cache-94bd64b9-3d20-4631-baed-4500f9beb9c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.627669] env[61972]: DEBUG oslo_concurrency.lockutils [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] Acquired lock "refresh_cache-94bd64b9-3d20-4631-baed-4500f9beb9c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.627828] env[61972]: DEBUG nova.network.neutron [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Refreshing network info cache for port c8a279aa-ff18-41b6-9384-5364aea002e9 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.633121] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5242e1e7-554a-85eb-3877-bccec34a44f0, 'name': SearchDatastore_Task, 'duration_secs': 0.015885} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.633579] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.633940] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] e2b6dd4e-b639-4553-a45f-87c155506ea3/e2b6dd4e-b639-4553-a45f-87c155506ea3.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.634272] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2614723b-bc5c-41d0-b272-e285a4f64c01 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.642200] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 856.642200] env[61972]: value = "task-1389203" [ 856.642200] env[61972]: _type = "Task" [ 856.642200] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.650768] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389203, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.658975] env[61972]: DEBUG nova.compute.manager [req-ae995560-34ec-4ad0-9bda-5580c4d9541a req-88683c16-7c6f-485e-bfe1-f245b28084e5 service nova] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Received event network-vif-deleted-51a88e7f-2e39-4674-9816-01cbb3bb08f1 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 856.769609] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "3d424523-b45d-4174-ac7a-08fd653e314f" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.769854] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.854470] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389200, 'name': CreateVM_Task, 'duration_secs': 0.509217} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.854772] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 856.855310] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.855548] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.855975] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 856.856292] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8174a243-0fb1-45a0-a899-b42edabf218e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.861293] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 856.861293] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]528aec8c-9b20-3496-7bf2-82d352df6f6a" [ 856.861293] env[61972]: _type = "Task" [ 856.861293] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.869799] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]528aec8c-9b20-3496-7bf2-82d352df6f6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.947891] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389201, 'name': Destroy_Task, 'duration_secs': 0.487822} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.948255] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Destroyed the VM [ 856.948634] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Deleting Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 856.948911] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9082195a-23aa-4f06-a490-3d2d1502b5d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.956106] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 856.956106] env[61972]: value = "task-1389204" [ 856.956106] env[61972]: _type = "Task" [ 856.956106] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.964869] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389204, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.035692] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389202, 'name': CreateVM_Task, 'duration_secs': 0.341184} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.035895] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.036654] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.036827] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.038041] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 857.038041] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f18753cf-be0d-48c9-9950-6ad9462ae178 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.043023] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 857.043023] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52031b1a-2f31-cf9a-84bb-108a501abdc1" [ 857.043023] env[61972]: _type = "Task" [ 857.043023] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.051412] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52031b1a-2f31-cf9a-84bb-108a501abdc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.071200] env[61972]: DEBUG nova.compute.utils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 857.074926] env[61972]: DEBUG nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 857.075121] env[61972]: DEBUG nova.network.neutron [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 857.122461] env[61972]: DEBUG nova.policy [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af9821911153489fb5392957eef6d16e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '300010fae08d4c8aa733d491ff9acbfc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 857.152688] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389203, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.273898] env[61972]: DEBUG nova.compute.utils [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 857.371624] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]528aec8c-9b20-3496-7bf2-82d352df6f6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010869} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.374435] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.374686] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.374989] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.375165] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.375346] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.375676] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2aa95e42-6cff-40f9-99af-5277d6d49144 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.387532] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.387769] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.388609] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1157997-7b18-4247-abee-87e3b62f762b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.395420] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 857.395420] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]520e0043-72c4-b40b-f0f4-2abe46c7a0c5" [ 857.395420] env[61972]: _type = "Task" [ 857.395420] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.403173] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]520e0043-72c4-b40b-f0f4-2abe46c7a0c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.466056] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389204, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.504533] env[61972]: DEBUG nova.network.neutron [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Updated VIF entry in instance network info cache for port c8a279aa-ff18-41b6-9384-5364aea002e9. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 857.505016] env[61972]: DEBUG nova.network.neutron [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Updating instance_info_cache with network_info: [{"id": "c8a279aa-ff18-41b6-9384-5364aea002e9", "address": "fa:16:3e:65:fb:cb", "network": {"id": "3a3bad14-0acb-42a3-8bd9-5b6ea2eca868", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d3f24b94cb854f4b925e1be405c7df82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7836a5b-a91e-4d3f-8e96-afe024f62bb5", "external-id": "nsx-vlan-transportzone-419", "segmentation_id": 419, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a279aa-ff", "ovs_interfaceid": "c8a279aa-ff18-41b6-9384-5364aea002e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.558178] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52031b1a-2f31-cf9a-84bb-108a501abdc1, 'name': SearchDatastore_Task, 'duration_secs': 0.054326} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.558535] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.558769] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.559239] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.559239] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.559397] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.559595] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0deb6641-ed93-4c16-a64f-192fbd61acb5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.575570] env[61972]: DEBUG nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 857.578380] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.578608] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.583141] env[61972]: DEBUG nova.network.neutron [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Successfully created port: 1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.586498] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e60a575-e8db-43df-b829-5f13d8a947da {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.593561] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 857.593561] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]529cdff8-537f-ca7c-a6d1-fe96c56eca14" [ 857.593561] env[61972]: _type = "Task" [ 857.593561] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.601876] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529cdff8-537f-ca7c-a6d1-fe96c56eca14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.654850] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389203, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578969} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.655042] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] e2b6dd4e-b639-4553-a45f-87c155506ea3/e2b6dd4e-b639-4553-a45f-87c155506ea3.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.655268] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.657116] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8546354-aa0d-4ce6-ab50-d02ac4208a76 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.662262] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 857.662262] env[61972]: value = "task-1389205" [ 857.662262] env[61972]: _type = "Task" [ 857.662262] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.672577] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389205, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.776971] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.007s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.838149] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d2c7d8-475b-4d6a-a196-a888ac9cc956 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.846491] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a654abc-3f05-4789-ae4e-c06dfca576c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.878162] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f6050c-fe21-477e-8c00-0ff5a413ed4f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.886660] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722648fb-14ae-4911-98a8-6b8cc7d8bb2c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.900283] env[61972]: DEBUG nova.compute.provider_tree [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.910021] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]520e0043-72c4-b40b-f0f4-2abe46c7a0c5, 'name': SearchDatastore_Task, 'duration_secs': 0.031484} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.911337] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e6d18f4-4607-4d2b-bf05-f79fd1048519 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.919184] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 857.919184] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52232ca9-da3d-add8-4bba-526e90e495f7" [ 857.919184] env[61972]: _type = "Task" [ 857.919184] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.927782] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52232ca9-da3d-add8-4bba-526e90e495f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.967142] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389204, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.008325] env[61972]: DEBUG oslo_concurrency.lockutils [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] Releasing lock "refresh_cache-94bd64b9-3d20-4631-baed-4500f9beb9c2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.008325] env[61972]: DEBUG nova.compute.manager [req-7edbf4ad-a48a-4e27-84f4-be2ee34614a5 req-e04f024e-2d54-46de-a78c-b5309e3a6ff3 service nova] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Received event network-vif-deleted-dbf9d428-9eb9-4f7d-b29a-5f45eaedae8c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 858.109401] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529cdff8-537f-ca7c-a6d1-fe96c56eca14, 'name': SearchDatastore_Task, 'duration_secs': 0.015841} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.111605] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db6c1da5-6c52-4260-b0ee-e0e3870eff63 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.117204] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 858.117204] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]529582d6-9375-8a7f-f8cb-6f4223edd4af" [ 858.117204] env[61972]: _type = "Task" [ 858.117204] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.126990] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529582d6-9375-8a7f-f8cb-6f4223edd4af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.175629] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389205, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.191682} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.175951] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.178174] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd6058e-6b06-4e2b-81b2-7d35ad2c2512 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.200240] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] e2b6dd4e-b639-4553-a45f-87c155506ea3/e2b6dd4e-b639-4553-a45f-87c155506ea3.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.200557] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47950cbd-d829-457d-b74e-685f710a0756 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.219872] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 858.219872] env[61972]: value = "task-1389206" [ 858.219872] env[61972]: _type = "Task" [ 858.219872] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.227796] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389206, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.406235] env[61972]: DEBUG nova.scheduler.client.report [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 858.432674] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52232ca9-da3d-add8-4bba-526e90e495f7, 'name': SearchDatastore_Task, 'duration_secs': 0.019971} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.433065] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.433401] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 667aff7f-57d5-4133-934d-386602a866f8/667aff7f-57d5-4133-934d-386602a866f8.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.434631] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e80d2fde-3ff7-414b-8ea7-d459310bcc5d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.441934] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 858.441934] env[61972]: value = "task-1389207" [ 858.441934] env[61972]: _type = "Task" [ 858.441934] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.450971] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389207, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.469417] env[61972]: DEBUG oslo_vmware.api [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389204, 'name': RemoveSnapshot_Task, 'duration_secs': 1.223631} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.469988] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Deleted Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 858.469988] env[61972]: INFO nova.compute.manager [None req-2d971d4b-4860-4dde-81bb-1677539a8446 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Took 14.93 seconds to snapshot the instance on the hypervisor. [ 858.510569] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.510569] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.587742] env[61972]: DEBUG nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 858.612658] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 858.613012] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 858.613190] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.613380] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 858.613529] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.613705] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 858.613939] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 858.614134] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 858.614311] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 858.614476] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 858.614650] env[61972]: DEBUG nova.virt.hardware [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 858.615524] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b31b8eb-17e5-4ec2-b913-91bd0bff2482 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.629258] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511787d1-6fea-4b4c-b12a-c44edfcde1b6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.632913] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529582d6-9375-8a7f-f8cb-6f4223edd4af, 'name': SearchDatastore_Task, 'duration_secs': 0.069758} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.633493] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.633804] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 94bd64b9-3d20-4631-baed-4500f9beb9c2/94bd64b9-3d20-4631-baed-4500f9beb9c2.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.634396] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bf71a0b-1d2b-4b85-a8c3-2ed7d0bdec5c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.649408] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 858.649408] env[61972]: value = "task-1389208" [ 858.649408] env[61972]: _type = "Task" [ 858.649408] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.656796] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.730020] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389206, 'name': ReconfigVM_Task, 'duration_secs': 0.257809} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.730298] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Reconfigured VM instance instance-00000046 to attach disk [datastore2] e2b6dd4e-b639-4553-a45f-87c155506ea3/e2b6dd4e-b639-4553-a45f-87c155506ea3.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.730962] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92651d07-d834-46a4-825e-14aed8265063 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.737750] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 858.737750] env[61972]: value = "task-1389209" [ 858.737750] env[61972]: _type = "Task" [ 858.737750] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.749679] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389209, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.850892] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "3d424523-b45d-4174-ac7a-08fd653e314f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.851196] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.851437] env[61972]: INFO nova.compute.manager [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Attaching volume 766d047e-033a-4781-8bda-7ae1a40449cf to /dev/sdb [ 858.888341] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ae3605-9624-4433-9971-c59fef8a37fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.895141] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af136e25-e0d0-4f83-8a3c-936b9ac2de2e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.908160] env[61972]: DEBUG nova.virt.block_device [None 
req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Updating existing volume attachment record: d1ebe06f-a84c-4df6-8525-7bf81ab4732b {{(pid=61972) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 858.911845] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.912345] env[61972]: DEBUG nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 858.915097] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.475s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.916338] env[61972]: INFO nova.compute.claims [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.954697] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389207, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.026851] env[61972]: DEBUG nova.compute.manager [req-62140501-0971-402a-9664-4759cb99b13e req-073175db-1cbd-4ae3-bbf6-2aa8ed6d4f47 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Received event network-vif-plugged-1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 859.027180] env[61972]: DEBUG oslo_concurrency.lockutils [req-62140501-0971-402a-9664-4759cb99b13e req-073175db-1cbd-4ae3-bbf6-2aa8ed6d4f47 service nova] Acquiring lock "21440243-458c-4640-b0ba-8f3b8b1b0720-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.027526] env[61972]: DEBUG oslo_concurrency.lockutils [req-62140501-0971-402a-9664-4759cb99b13e req-073175db-1cbd-4ae3-bbf6-2aa8ed6d4f47 service nova] Lock "21440243-458c-4640-b0ba-8f3b8b1b0720-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.028649] env[61972]: DEBUG oslo_concurrency.lockutils [req-62140501-0971-402a-9664-4759cb99b13e req-073175db-1cbd-4ae3-bbf6-2aa8ed6d4f47 service nova] Lock "21440243-458c-4640-b0ba-8f3b8b1b0720-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.028649] env[61972]: DEBUG nova.compute.manager [req-62140501-0971-402a-9664-4759cb99b13e req-073175db-1cbd-4ae3-bbf6-2aa8ed6d4f47 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] No waiting events found dispatching network-vif-plugged-1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 859.029615] env[61972]: WARNING nova.compute.manager [req-62140501-0971-402a-9664-4759cb99b13e req-073175db-1cbd-4ae3-bbf6-2aa8ed6d4f47 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Received unexpected event network-vif-plugged-1296b6ff-7e29-4bc6-8230-f6b7696702f8 for instance with vm_state building and task_state spawning. [ 859.141797] env[61972]: DEBUG nova.network.neutron [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Successfully updated port: 1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.163430] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389208, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.248018] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389209, 'name': Rename_Task, 'duration_secs': 0.139125} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.249040] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.249040] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-696c8375-1b9f-452e-b8d7-95e94c8a21ac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.255516] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 859.255516] env[61972]: value = "task-1389213" [ 859.255516] env[61972]: _type = "Task" [ 859.255516] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.264976] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389213, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.421228] env[61972]: DEBUG nova.compute.utils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.425551] env[61972]: DEBUG nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 859.425783] env[61972]: DEBUG nova.network.neutron [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.453331] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389207, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.943158} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.454027] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 667aff7f-57d5-4133-934d-386602a866f8/667aff7f-57d5-4133-934d-386602a866f8.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.454027] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.454233] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66ac1465-cd7f-4158-9934-4e10b0ea355e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.461406] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 859.461406] env[61972]: value = "task-1389214" [ 859.461406] env[61972]: _type = "Task" [ 859.461406] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.468988] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389214, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.474963] env[61972]: DEBUG nova.policy [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ed5336d22ef451e842b188bdd50f353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fd99c56733940dda5267401c71b9e5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 859.653064] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.653064] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquired lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.653064] env[61972]: DEBUG nova.network.neutron [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 859.663900] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.845759} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.664170] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 94bd64b9-3d20-4631-baed-4500f9beb9c2/94bd64b9-3d20-4631-baed-4500f9beb9c2.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.664384] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.664771] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-025fef6c-439a-4f3b-9378-d64015e08341 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.672606] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 859.672606] env[61972]: value = "task-1389215" [ 859.672606] env[61972]: _type = "Task" [ 859.672606] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.680525] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389215, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.731035] env[61972]: DEBUG nova.network.neutron [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Successfully created port: 96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 859.765850] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389213, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.926583] env[61972]: DEBUG nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 859.974895] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389214, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072705} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.976096] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.976820] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b12a726-4aa9-4e58-bd38-db8189fdbee1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.998058] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 667aff7f-57d5-4133-934d-386602a866f8/667aff7f-57d5-4133-934d-386602a866f8.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.000528] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3574bb50-cca4-47cd-9493-c2167467870c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.021619] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 860.021619] env[61972]: value = "task-1389216" [ 860.021619] env[61972]: _type = "Task" [ 860.021619] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.030581] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389216, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.179971] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bdc21c-dd82-4ff4-8f64-475fc4ee01e7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.183763] env[61972]: DEBUG nova.network.neutron [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.189847] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074527} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.190798] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd3f9b7-1347-46f2-9fca-fb6aa5a069c7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.194033] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.195048] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987b5cdc-80df-44a2-8f52-787f9c1347fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.216432] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 94bd64b9-3d20-4631-baed-4500f9beb9c2/94bd64b9-3d20-4631-baed-4500f9beb9c2.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.241088] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11db6cc4-86f9-4ad6-b3ac-6ee9cbd9a84d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.256121] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e233a0-0ab8-400a-b75a-970682eb284a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.263211] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 860.263211] env[61972]: value = "task-1389217" [ 860.263211] env[61972]: _type = "Task" [ 860.263211] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.271031] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90273685-1130-49aa-8268-076a6b2a11a2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.275444] env[61972]: DEBUG oslo_vmware.api [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389213, 'name': PowerOnVM_Task, 'duration_secs': 0.62203} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.276899] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.277132] env[61972]: INFO nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Took 8.84 seconds to spawn the instance on the hypervisor. [ 860.277320] env[61972]: DEBUG nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 860.278752] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280f34ee-5656-4fb5-bcd0-ea041221ac82 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.292081] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389217, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.292506] env[61972]: DEBUG nova.compute.provider_tree [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.388998] env[61972]: DEBUG nova.network.neutron [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.532457] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389216, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.776231] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389217, 'name': ReconfigVM_Task, 'duration_secs': 0.332875} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.776530] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 94bd64b9-3d20-4631-baed-4500f9beb9c2/94bd64b9-3d20-4631-baed-4500f9beb9c2.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.777193] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a775d8d-1559-4797-97b8-d9907a3cbed7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.783522] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 860.783522] env[61972]: value = "task-1389218" [ 860.783522] env[61972]: _type = "Task" [ 860.783522] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.791171] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389218, 'name': Rename_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.796108] env[61972]: DEBUG nova.scheduler.client.report [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 860.807099] env[61972]: INFO nova.compute.manager [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Took 28.74 seconds to build instance. [ 860.891552] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Releasing lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.891899] env[61972]: DEBUG nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Instance network_info: |[{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 860.892449] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:ae:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1296b6ff-7e29-4bc6-8230-f6b7696702f8', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 860.900243] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Creating folder: Project (300010fae08d4c8aa733d491ff9acbfc). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.900535] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d70c2c58-c09d-4d8a-a078-dd98894378ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.910898] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Created folder: Project (300010fae08d4c8aa733d491ff9acbfc) in parent group-v294799. [ 860.911103] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Creating folder: Instances. Parent ref: group-v294844. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.911348] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b98e0741-a2f0-4bde-af7f-bd57ca5e4dc5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.919851] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Created folder: Instances in parent group-v294844. [ 860.920118] env[61972]: DEBUG oslo.service.loopingcall [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.920348] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 860.920689] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a23f67c0-99af-407d-9fec-1102f1590aa3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.937272] env[61972]: DEBUG nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 860.944207] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.944207] env[61972]: value = "task-1389221" [ 860.944207] env[61972]: _type = "Task" [ 860.944207] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.956923] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389221, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.965363] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 860.965663] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 860.965856] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 860.966066] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 860.966254] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 860.966409] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 860.966625] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 860.966806] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 
tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 860.967016] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 860.967223] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 860.967405] env[61972]: DEBUG nova.virt.hardware [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 860.968231] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef82d3b-2ae8-4e0f-993d-f7a0d4a22042 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.976916] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c16e3ab-f5b7-452d-99e6-b01d48d99649 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.031086] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389216, 'name': ReconfigVM_Task, 'duration_secs': 0.972278} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.031408] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 667aff7f-57d5-4133-934d-386602a866f8/667aff7f-57d5-4133-934d-386602a866f8.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.032087] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5072a302-063d-44ea-8f3d-8c92292703b1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.038523] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 861.038523] env[61972]: value = "task-1389222" [ 861.038523] env[61972]: _type = "Task" [ 861.038523] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.047121] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389222, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.053769] env[61972]: DEBUG nova.compute.manager [req-c242c536-c96f-4589-843d-25d864dc8875 req-357d4e01-e108-493f-8d83-097534f450d0 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Received event network-changed-1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 861.055128] env[61972]: DEBUG nova.compute.manager [req-c242c536-c96f-4589-843d-25d864dc8875 req-357d4e01-e108-493f-8d83-097534f450d0 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Refreshing instance network info cache due to event network-changed-1296b6ff-7e29-4bc6-8230-f6b7696702f8. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 861.055128] env[61972]: DEBUG oslo_concurrency.lockutils [req-c242c536-c96f-4589-843d-25d864dc8875 req-357d4e01-e108-493f-8d83-097534f450d0 service nova] Acquiring lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.055128] env[61972]: DEBUG oslo_concurrency.lockutils [req-c242c536-c96f-4589-843d-25d864dc8875 req-357d4e01-e108-493f-8d83-097534f450d0 service nova] Acquired lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.055128] env[61972]: DEBUG nova.network.neutron [req-c242c536-c96f-4589-843d-25d864dc8875 req-357d4e01-e108-493f-8d83-097534f450d0 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Refreshing network info cache for port 1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.214982] env[61972]: DEBUG nova.network.neutron [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Successfully updated port: 96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 861.293844] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389218, 'name': Rename_Task, 'duration_secs': 0.174433} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.294138] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.294388] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d73e831-6884-4e11-bb4d-c70c35269a7b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.300560] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.301063] env[61972]: DEBUG nova.compute.manager [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 861.303518] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 861.303518] env[61972]: value = "task-1389224" [ 861.303518] env[61972]: _type = "Task" [ 861.303518] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.303799] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.500s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.305149] env[61972]: INFO nova.compute.claims [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.310703] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d03f1a64-19f2-4757-941a-887b23e54bd4 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "e2b6dd4e-b639-4553-a45f-87c155506ea3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.743s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.317131] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389224, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.460719] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389221, 'name': CreateVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.547924] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389222, 'name': Rename_Task, 'duration_secs': 0.172727} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.549180] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.549180] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d35c869d-6f01-4b42-bad5-a436237ef58c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.554641] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 861.554641] env[61972]: value = "task-1389225" [ 861.554641] env[61972]: _type = "Task" [ 861.554641] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.563961] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389225, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.718350] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.718350] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.718350] env[61972]: DEBUG nova.network.neutron [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 861.776490] env[61972]: DEBUG nova.network.neutron [req-c242c536-c96f-4589-843d-25d864dc8875 req-357d4e01-e108-493f-8d83-097534f450d0 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updated VIF entry in instance network info cache for port 1296b6ff-7e29-4bc6-8230-f6b7696702f8. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 861.776972] env[61972]: DEBUG nova.network.neutron [req-c242c536-c96f-4589-843d-25d864dc8875 req-357d4e01-e108-493f-8d83-097534f450d0 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.809594] env[61972]: DEBUG nova.compute.utils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 861.819117] env[61972]: DEBUG nova.compute.manager [None 
req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 861.819336] env[61972]: DEBUG nova.network.neutron [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 861.821274] env[61972]: DEBUG nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 861.836472] env[61972]: DEBUG oslo_vmware.api [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389224, 'name': PowerOnVM_Task, 'duration_secs': 0.526049} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.836472] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.836609] env[61972]: INFO nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Took 7.98 seconds to spawn the instance on the hypervisor. 
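The CreateVM_Task / PowerOnVM_Task entries above show the driver's wait_for_task loop: a vCenter task is submitted, then polled (progress 0% ... 33% ... 99%) until it reports success and the "completed successfully" line is logged. A minimal sketch of that poll-and-wait pattern, with a hypothetical read_task_info() callable standing in for oslo.vmware's real PropertyCollector plumbing:

    import time

    POLL_INTERVAL = 0.5  # seconds between polls, analogous to a task_poll_interval setting

    def wait_for_task(read_task_info, task_ref, timeout=300.0):
        """Poll a vCenter-style task until it succeeds, fails, or times out.

        read_task_info is a stand-in callable returning dicts such as
        {'state': 'running', 'progress': 35} or {'state': 'success'}; the
        real driver reads TaskInfo through the vSphere API instead.
        """
        deadline = time.monotonic() + timeout
        while True:
            info = read_task_info(task_ref)
            state = info.get('state')
            if state == 'success':
                return info
            if state == 'error':
                raise RuntimeError("task %s failed: %s" % (task_ref, info.get('error')))
            if time.monotonic() > deadline:
                raise TimeoutError("task %s did not finish within %ss" % (task_ref, timeout))
            # Mirrors lines like "Task: {'id': task-1389221, 'name': CreateVM_Task} progress is 99%."
            print("Task %s progress is %s%%" % (task_ref, info.get('progress', 0)))
            time.sleep(POLL_INTERVAL)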
[ 861.836666] env[61972]: DEBUG nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 861.837486] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e98470-0242-4d05-906c-7e41670e1fcd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.874172] env[61972]: DEBUG nova.policy [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cefef67f4ae0451aaa108df20aa7a3db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a685a448ff041db8bc49b4429688e34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 861.954747] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389221, 'name': CreateVM_Task, 'duration_secs': 0.52709} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.955163] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 861.957898] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.958082] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.958396] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 861.959234] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5c91110-a581-4c18-a8c9-29f4ae4c4b91 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.964259] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 
tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 861.964259] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]526ea2b8-444a-22e0-555d-b4e3019b1786" [ 861.964259] env[61972]: _type = "Task" [ 861.964259] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.973465] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]526ea2b8-444a-22e0-555d-b4e3019b1786, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.069251] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389225, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.085530] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b0e4ee-0780-438c-b5c1-439b6b71184c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.093591] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1d0ed1-13ac-4773-a53a-408361ebef7b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.124934] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d807b3-4241-4779-bd05-6d65245c8d5b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.133350] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c7e778-d722-481c-b43d-976f1f4016ca {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.147705] env[61972]: DEBUG nova.compute.provider_tree [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.149654] env[61972]: DEBUG nova.network.neutron [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Successfully created port: db1b2713-6097-47ef-bec1-5ef54204a3da {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.267271] env[61972]: DEBUG nova.network.neutron [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.280136] env[61972]: DEBUG oslo_concurrency.lockutils [req-c242c536-c96f-4589-843d-25d864dc8875 req-357d4e01-e108-493f-8d83-097534f450d0 service nova] Releasing lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.327575] env[61972]: DEBUG nova.compute.manager [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 862.345475] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.365248] env[61972]: INFO nova.compute.manager [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Took 21.40 seconds to build instance. [ 862.475163] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]526ea2b8-444a-22e0-555d-b4e3019b1786, 'name': SearchDatastore_Task, 'duration_secs': 0.01097} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.475493] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.475731] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.475965] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.476128] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.476312] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.476581] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f052799-8526-4c46-92d0-393f85afc835 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.483269] env[61972]: DEBUG nova.network.neutron [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [{"id": "96b44391-970b-458b-bb63-47288e6d18a2", "address": "fa:16:3e:01:b3:84", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96b44391-97", "ovs_interfaceid": "96b44391-970b-458b-bb63-47288e6d18a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.485416] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.485593] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.486524] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c7db6ed-6f22-4694-aba4-a2c42e369962 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.493588] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 862.493588] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52817bc0-380d-2fd8-6c1f-53ff74ba90bc" [ 862.493588] env[61972]: _type = "Task" [ 862.493588] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.502376] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52817bc0-380d-2fd8-6c1f-53ff74ba90bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.568148] env[61972]: DEBUG oslo_vmware.api [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389225, 'name': PowerOnVM_Task, 'duration_secs': 0.587436} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.568148] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.568148] env[61972]: INFO nova.compute.manager [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Took 6.31 seconds to spawn the instance on the hypervisor. 
[ 862.568148] env[61972]: DEBUG nova.compute.manager [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 862.568433] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54cab74-4bc7-4deb-8cf0-7a27fe59a99a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.653445] env[61972]: DEBUG nova.scheduler.client.report [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 862.868201] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a9ca75bc-42a2-498e-aa7d-196673f6d54d tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "94bd64b9-3d20-4631-baed-4500f9beb9c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.082s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.987518] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.987855] env[61972]: DEBUG nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Instance network_info: |[{"id": "96b44391-970b-458b-bb63-47288e6d18a2", "address": "fa:16:3e:01:b3:84", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96b44391-97", "ovs_interfaceid": "96b44391-970b-458b-bb63-47288e6d18a2", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 862.988328] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:b3:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96b44391-970b-458b-bb63-47288e6d18a2', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 862.995776] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating folder: Project (3fd99c56733940dda5267401c71b9e5d). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 862.996203] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45a8b69a-9d85-4450-9372-ccebcd623bfe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.007931] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52817bc0-380d-2fd8-6c1f-53ff74ba90bc, 'name': SearchDatastore_Task, 'duration_secs': 0.010976} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.008798] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4179ffe3-5eec-4805-a176-f76586162e3f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.012481] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Created folder: Project (3fd99c56733940dda5267401c71b9e5d) in parent group-v294799. [ 863.012698] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating folder: Instances. Parent ref: group-v294847. 
{{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.013229] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-108e8f52-cd1b-48c9-a30a-ef3b1f9fe336 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.016071] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 863.016071] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52614267-d14f-2ef8-8df8-28fde489ca26" [ 863.016071] env[61972]: _type = "Task" [ 863.016071] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.024335] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52614267-d14f-2ef8-8df8-28fde489ca26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.025758] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Created folder: Instances in parent group-v294847. [ 863.025999] env[61972]: DEBUG oslo.service.loopingcall [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.026224] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.026479] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a1640fe-97a4-4fd3-906b-0530d462167b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.044977] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.044977] env[61972]: value = "task-1389228" [ 863.044977] env[61972]: _type = "Task" [ 863.044977] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.052675] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389228, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.084409] env[61972]: INFO nova.compute.manager [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Took 19.96 seconds to build instance. 
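The Acquiring/Acquired/Releasing lock lines around the devstack-image-cache_base path above come from oslo.concurrency's named locks: only one worker fetches or inspects a given cached image at a time, and concurrent spawns of the same image wait on the same lock name. A rough sketch of that guard, assuming lockutils.lock() keeps its documented context-manager form and with a hypothetical fetch_image_if_missing() doing the actual datastore search and download:

    from oslo_concurrency import lockutils

    def ensure_cached_image(datastore, image_id, fetch_image_if_missing):
        # Lock name in the same shape as the log:
        #   "[datastore1] devstack-image-cache_base/<image-id>"
        lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
        with lockutils.lock(lock_name):
            # Inside the lock: look for the cached VMDK and create/download it
            # only if missing (the SearchDatastore_Task / MakeDirectory calls
            # seen in the entries above).
            return fetch_image_if_missing(datastore, image_id)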
[ 863.144035] env[61972]: DEBUG nova.compute.manager [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received event network-vif-plugged-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 863.144576] env[61972]: DEBUG oslo_concurrency.lockutils [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] Acquiring lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.144849] env[61972]: DEBUG oslo_concurrency.lockutils [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.145121] env[61972]: DEBUG oslo_concurrency.lockutils [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.145310] env[61972]: DEBUG nova.compute.manager [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] No waiting events found dispatching network-vif-plugged-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 863.145480] env[61972]: WARNING nova.compute.manager [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received unexpected event network-vif-plugged-96b44391-970b-458b-bb63-47288e6d18a2 for instance with vm_state building and task_state spawning. [ 863.145645] env[61972]: DEBUG nova.compute.manager [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received event network-changed-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 863.145803] env[61972]: DEBUG nova.compute.manager [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Refreshing instance network info cache due to event network-changed-96b44391-970b-458b-bb63-47288e6d18a2. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 863.145990] env[61972]: DEBUG oslo_concurrency.lockutils [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] Acquiring lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.146199] env[61972]: DEBUG oslo_concurrency.lockutils [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] Acquired lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.146328] env[61972]: DEBUG nova.network.neutron [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Refreshing network info cache for port 96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 863.158332] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.854s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.159109] env[61972]: DEBUG nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 863.162968] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.272s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.163358] env[61972]: DEBUG nova.objects.instance [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lazy-loading 'resources' on Instance uuid 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.340358] env[61972]: DEBUG nova.compute.manager [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 863.362221] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 863.362510] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 863.362704] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.362907] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 863.363071] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.363228] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 863.363439] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 863.363598] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 863.363785] env[61972]: DEBUG 
nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 863.363965] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 863.364156] env[61972]: DEBUG nova.virt.hardware [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.365083] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8279807-a5c0-42b6-a2d4-566d29f4bd9b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.372774] env[61972]: DEBUG nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 863.377336] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8001f274-d1f2-458c-8ee6-a6b5824ebb9d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.457183] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Volume attach. 
Driver type: vmdk {{(pid=61972) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 863.457449] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294843', 'volume_id': '766d047e-033a-4781-8bda-7ae1a40449cf', 'name': 'volume-766d047e-033a-4781-8bda-7ae1a40449cf', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3d424523-b45d-4174-ac7a-08fd653e314f', 'attached_at': '', 'detached_at': '', 'volume_id': '766d047e-033a-4781-8bda-7ae1a40449cf', 'serial': '766d047e-033a-4781-8bda-7ae1a40449cf'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 863.458341] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519253f4-ddc6-448f-b357-908a44a4db54 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.475416] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed061ae8-8629-45a0-81d7-5ba4241fae32 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.502157] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] volume-766d047e-033a-4781-8bda-7ae1a40449cf/volume-766d047e-033a-4781-8bda-7ae1a40449cf.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.502366] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56825efa-fa3b-4ce0-8f2c-ce32c41a9c61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.521573] env[61972]: DEBUG oslo_vmware.api [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 863.521573] env[61972]: value = "task-1389229" [ 863.521573] env[61972]: _type = "Task" [ 863.521573] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.533185] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52614267-d14f-2ef8-8df8-28fde489ca26, 'name': SearchDatastore_Task, 'duration_secs': 0.02095} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.533989] env[61972]: DEBUG oslo_vmware.api [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389229, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.533989] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.533989] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 21440243-458c-4640-b0ba-8f3b8b1b0720/21440243-458c-4640-b0ba-8f3b8b1b0720.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.534220] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-506cba7b-746d-4157-9d34-035ce928be9c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.540176] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 863.540176] env[61972]: value = "task-1389230" [ 863.540176] env[61972]: _type = "Task" [ 863.540176] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.551909] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389230, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.557987] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389228, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.590016] env[61972]: DEBUG oslo_concurrency.lockutils [None req-12e481de-f314-4756-bce7-f31158966e86 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "667aff7f-57d5-4133-934d-386602a866f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.104s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.597041] env[61972]: INFO nova.compute.manager [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Rebuilding instance [ 863.659475] env[61972]: DEBUG nova.compute.manager [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 863.663018] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f7a521-7625-434e-8142-ac945d3b66a2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.666993] env[61972]: DEBUG nova.compute.utils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 863.673180] env[61972]: DEBUG nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Allocating IP information in the background. 
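The lockutils entries in this trace (for example the build lock on instance 667aff7f-57d5-4133-934d-386602a866f8 released above after being held 105.104s) follow the standard oslo.concurrency pattern: acquire a named lock, do the work, and log how long the lock was held. A minimal sketch of that pattern, assuming oslo.concurrency is installed; do_build() is a hypothetical stand-in for the locked work.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "667aff7f-57d5-4133-934d-386602a866f8"  # example uuid taken from the log

    def do_build():
        pass  # hypothetical stand-in for the locked build-and-run work

    def build_instance():
        # lockutils.lock() is a context manager; the "acquired" / "released ... held Ns"
        # entries in the trace are emitted around exactly this kind of critical section.
        with lockutils.lock(INSTANCE_UUID):
            do_build()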
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 863.673524] env[61972]: DEBUG nova.network.neutron [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 863.771461] env[61972]: DEBUG nova.policy [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa1cef9829b45f4bbe90e9882b8f8c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57829399c5741c08c30bb60163148b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 863.903720] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.979348] env[61972]: DEBUG nova.network.neutron [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Successfully updated port: db1b2713-6097-47ef-bec1-5ef54204a3da {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.993510] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a7caf9-ec85-42ab-8a22-8b1b39b63913 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.999319] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe104f2-2dec-4453-844d-b801e05d3aec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.004125] env[61972]: DEBUG nova.network.neutron [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updated VIF entry in instance network info cache for port 96b44391-970b-458b-bb63-47288e6d18a2. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 864.004316] env[61972]: DEBUG nova.network.neutron [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [{"id": "96b44391-970b-458b-bb63-47288e6d18a2", "address": "fa:16:3e:01:b3:84", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96b44391-97", "ovs_interfaceid": "96b44391-970b-458b-bb63-47288e6d18a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.037739] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7dd39d-314d-430f-8727-a1f702b168f5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.050021] env[61972]: DEBUG oslo_vmware.api [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.054789] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cc6790-28df-4b6f-9ef5-90d382972a3a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.061895] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389230, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.067360] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389228, 'name': CreateVM_Task, 'duration_secs': 0.536027} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.075019] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.075806] env[61972]: DEBUG nova.compute.provider_tree [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.081034] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.081034] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.081034] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 864.081034] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e223e97-91c1-4979-8789-c3b5256061b9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.082969] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 864.082969] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52485f87-d000-35e4-5b50-a0bd24b462c8" [ 864.082969] env[61972]: _type = "Task" [ 864.082969] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.095098] env[61972]: DEBUG nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 864.097266] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52485f87-d000-35e4-5b50-a0bd24b462c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.172079] env[61972]: DEBUG nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 864.483997] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-942b00ba-a615-452d-a0c1-633d48d73fd4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.483997] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-942b00ba-a615-452d-a0c1-633d48d73fd4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.484249] env[61972]: DEBUG nova.network.neutron [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 864.506801] env[61972]: DEBUG oslo_concurrency.lockutils [req-f20ff67e-5a42-4264-8fec-8921b958fbc3 req-ef73961d-ebf7-40a4-a8b5-fed31ae89936 service nova] Releasing lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.532132] env[61972]: DEBUG nova.network.neutron [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Successfully created port: 3583e7ca-03b2-4200-8a2a-9394e6cec912 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.542992] env[61972]: DEBUG oslo_vmware.api [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389229, 'name': ReconfigVM_Task, 'duration_secs': 0.930152} completed successfully. 
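The repeated "Waiting for the task" / "progress is N%" / "completed successfully" entries are oslo.vmware's task polling. The sketch below shows the general invoke-then-wait pattern; the VMwareAPISession endpoint, credentials and retry/poll arguments are placeholders (constructing the session opens a real connection, so this only runs against a reachable vCenter), and only invoke_api()/wait_for_task() mirror the calls these log lines come from.

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials for illustration only.
    session = vmware_api.VMwareAPISession(
        "vcenter.example.org", "user", "password",
        api_retry_count=10, task_poll_interval=0.5)

    def reconfigure(vm_ref, config_spec):
        # invoke_api() returns a task moref; wait_for_task() polls it until the
        # task succeeds or raises, producing the "_poll_task ... progress" entries above.
        task = session.invoke_api(session.vim, "ReconfigVM_Task",
                                  vm_ref, spec=config_spec)
        return session.wait_for_task(task)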
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.551840] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Reconfigured VM instance instance-00000045 to attach disk [datastore1] volume-766d047e-033a-4781-8bda-7ae1a40449cf/volume-766d047e-033a-4781-8bda-7ae1a40449cf.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.558343] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91b4bc08-637e-4fed-b152-7ab3275a6929 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.574893] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557696} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.575959] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 21440243-458c-4640-b0ba-8f3b8b1b0720/21440243-458c-4640-b0ba-8f3b8b1b0720.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.576206] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.576516] env[61972]: DEBUG oslo_vmware.api [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 864.576516] env[61972]: value = "task-1389231" [ 864.576516] env[61972]: _type = "Task" [ 864.576516] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.576909] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-263d6386-1516-4cce-8c7b-d024dd822174 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.581186] env[61972]: DEBUG nova.scheduler.client.report [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 864.593952] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 864.593952] env[61972]: value = "task-1389232" [ 864.593952] env[61972]: _type = "Task" [ 864.593952] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.597663] env[61972]: DEBUG oslo_vmware.api [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389231, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.606633] env[61972]: DEBUG nova.compute.manager [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 864.608890] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7a4adc-f8b3-4348-9e44-78456e491984 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.611836] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52485f87-d000-35e4-5b50-a0bd24b462c8, 'name': SearchDatastore_Task, 'duration_secs': 0.014296} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.614776] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.615033] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.615272] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.615420] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.615597] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.616563] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-336dd72a-cf92-4688-ba4f-a6c3e62142be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.626125] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389232, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.632447] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.633870] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.635314] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.635314] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7befcb50-8721-47b3-9f39-20761f3781a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.640601] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 864.640601] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]529e95f5-5787-1341-20e8-387ef859c71e" [ 864.640601] env[61972]: _type = "Task" [ 864.640601] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.649470] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529e95f5-5787-1341-20e8-387ef859c71e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.682520] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.682833] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fdb98491-1658-4720-98c6-8f68efcb6e1d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.690331] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 864.690331] env[61972]: value = "task-1389233" [ 864.690331] env[61972]: _type = "Task" [ 864.690331] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.699326] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389233, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.018922] env[61972]: DEBUG nova.network.neutron [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.089172] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.926s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.094933] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.223s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.096652] env[61972]: INFO nova.compute.claims [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.099381] env[61972]: DEBUG oslo_vmware.api [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389231, 'name': ReconfigVM_Task, 'duration_secs': 0.153179} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.099759] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294843', 'volume_id': '766d047e-033a-4781-8bda-7ae1a40449cf', 'name': 'volume-766d047e-033a-4781-8bda-7ae1a40449cf', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3d424523-b45d-4174-ac7a-08fd653e314f', 'attached_at': '', 'detached_at': '', 'volume_id': '766d047e-033a-4781-8bda-7ae1a40449cf', 'serial': '766d047e-033a-4781-8bda-7ae1a40449cf'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 865.110419] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389232, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069292} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.110693] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.111422] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8adb970-7e9a-460a-bc17-1dc4d9b2cd62 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.116517] env[61972]: INFO nova.scheduler.client.report [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Deleted allocations for instance 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b [ 865.136030] env[61972]: INFO nova.compute.manager [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] instance snapshotting [ 865.146264] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 21440243-458c-4640-b0ba-8f3b8b1b0720/21440243-458c-4640-b0ba-8f3b8b1b0720.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.147584] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c539d95f-646f-4ab6-9632-943413c7fca6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.163477] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1592adf6-f14e-4fc9-8b4b-9792849db28d 
{{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.171329] env[61972]: DEBUG nova.compute.manager [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Received event network-vif-plugged-db1b2713-6097-47ef-bec1-5ef54204a3da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 865.171529] env[61972]: DEBUG oslo_concurrency.lockutils [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] Acquiring lock "942b00ba-a615-452d-a0c1-633d48d73fd4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.171793] env[61972]: DEBUG oslo_concurrency.lockutils [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] Lock "942b00ba-a615-452d-a0c1-633d48d73fd4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.171942] env[61972]: DEBUG oslo_concurrency.lockutils [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] Lock "942b00ba-a615-452d-a0c1-633d48d73fd4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.172119] env[61972]: DEBUG nova.compute.manager [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] No waiting events found dispatching network-vif-plugged-db1b2713-6097-47ef-bec1-5ef54204a3da {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 865.172255] env[61972]: WARNING nova.compute.manager [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Received unexpected event network-vif-plugged-db1b2713-6097-47ef-bec1-5ef54204a3da for instance with vm_state building and task_state spawning. [ 865.172409] env[61972]: DEBUG nova.compute.manager [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Received event network-changed-db1b2713-6097-47ef-bec1-5ef54204a3da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 865.172560] env[61972]: DEBUG nova.compute.manager [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Refreshing instance network info cache due to event network-changed-db1b2713-6097-47ef-bec1-5ef54204a3da. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 865.172760] env[61972]: DEBUG oslo_concurrency.lockutils [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] Acquiring lock "refresh_cache-942b00ba-a615-452d-a0c1-633d48d73fd4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.186208] env[61972]: DEBUG nova.network.neutron [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Updating instance_info_cache with network_info: [{"id": "db1b2713-6097-47ef-bec1-5ef54204a3da", "address": "fa:16:3e:39:34:dd", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb1b2713-60", "ovs_interfaceid": "db1b2713-6097-47ef-bec1-5ef54204a3da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.189090] env[61972]: DEBUG nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 865.196331] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960ac371-69c0-4c99-a67e-c36434672df1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.199079] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 865.199079] env[61972]: value = "task-1389234" [ 865.199079] env[61972]: _type = "Task" [ 865.199079] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.199340] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529e95f5-5787-1341-20e8-387ef859c71e, 'name': SearchDatastore_Task, 'duration_secs': 0.010972} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.208333] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91155510-e845-473f-8a9c-d3529b1e2ec5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.219711] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389233, 'name': PowerOffVM_Task, 'duration_secs': 0.106979} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.223561] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.223561] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.223561] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.223561] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.223831] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.223831] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.223831] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.223831] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.223831] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.223996] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.223996] env[61972]: DEBUG nova.virt.hardware [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.224150] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 865.224366] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 865.225346] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa1010c-93f6-4807-b26e-f2423d248e31 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.232443] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06dd6c4-ed89-4575-b59f-a62fa8d3ddc3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.235343] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.238977] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 865.238977] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c77f87-37cc-5454-68d6-47c39d2069f5" [ 865.238977] env[61972]: _type = "Task" [ 865.238977] env[61972]: } to complete. 
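The hardware lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerate every sockets/cores/threads split whose product matches the flavor's vCPU count and filter it against the flavor and image limits. A simplified, self-contained illustration of that enumeration follows; it is not Nova's implementation, and the 65536 defaults simply echo the logged limits.

    from dataclasses import dataclass

    @dataclass(frozen=True)
    class Topology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield every sockets/cores/threads split whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield Topology(sockets, cores, threads)

    # For the 1-vCPU m1.nano flavor in this trace the only candidate is 1:1:1,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(list(possible_topologies(1)))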
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.242874] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 865.245799] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-613f05c8-0a0a-4de2-8cfa-4ee55e27c3c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.248337] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048b9244-22c9-4989-970a-cc2200137d46 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.267889] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c77f87-37cc-5454-68d6-47c39d2069f5, 'name': SearchDatastore_Task, 'duration_secs': 0.0095} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.268185] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.268444] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 9562558a-89ba-4169-bd0a-ad31fc0c33bc/9562558a-89ba-4169-bd0a-ad31fc0c33bc.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.268952] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5fecf98-767f-4bf0-9a77-9cd222c16260 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.274812] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 865.274812] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 865.274812] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 
tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Deleting the datastore file [datastore1] 667aff7f-57d5-4133-934d-386602a866f8 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 865.274812] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ecabbd5-146b-4471-81e5-1257deed50c5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.278068] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 865.278068] env[61972]: value = "task-1389236" [ 865.278068] env[61972]: _type = "Task" [ 865.278068] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.283057] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 865.283057] env[61972]: value = "task-1389237" [ 865.283057] env[61972]: _type = "Task" [ 865.283057] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.288781] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389236, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.293213] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389237, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.626210] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1120124c-6f29-4c4b-af1c-c54971617578 tempest-ServerMetadataNegativeTestJSON-1858154920 tempest-ServerMetadataNegativeTestJSON-1858154920-project-member] Lock "1cd50cd6-ccb2-41aa-8c24-9eabed18de6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.179s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.693523] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-942b00ba-a615-452d-a0c1-633d48d73fd4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.694064] env[61972]: DEBUG nova.compute.manager [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Instance network_info: |[{"id": "db1b2713-6097-47ef-bec1-5ef54204a3da", "address": "fa:16:3e:39:34:dd", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb1b2713-60", "ovs_interfaceid": "db1b2713-6097-47ef-bec1-5ef54204a3da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 865.694427] env[61972]: DEBUG oslo_concurrency.lockutils [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] Acquired lock "refresh_cache-942b00ba-a615-452d-a0c1-633d48d73fd4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.694657] env[61972]: DEBUG nova.network.neutron [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Refreshing network info cache for port db1b2713-6097-47ef-bec1-5ef54204a3da {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.696492] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:34:dd', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db1b2713-6097-47ef-bec1-5ef54204a3da', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.705747] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Creating folder: Project (8a685a448ff041db8bc49b4429688e34). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.709260] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c723430e-70c4-45fb-9311-0c5efb85b8a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.722248] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Creating Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 865.723087] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389234, 'name': ReconfigVM_Task, 'duration_secs': 0.386873} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.724525] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6f8e780d-a13c-48dc-8e74-d0f890a75bb5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.726549] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 21440243-458c-4640-b0ba-8f3b8b1b0720/21440243-458c-4640-b0ba-8f3b8b1b0720.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.727262] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Created folder: Project (8a685a448ff041db8bc49b4429688e34) in parent group-v294799. [ 865.727439] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Creating folder: Instances. Parent ref: group-v294850. 
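The instance_info_cache entry logged above is a list of VIF dicts, each carrying its network, subnets and fixed IPs. As a hedged example of walking that structure, the snippet below uses a trimmed copy of the logged entry and a hypothetical fixed_ips() helper to collect the fixed IPv4 addresses (here 192.168.128.6).

    network_info = [{
        "id": "db1b2713-6097-47ef-bec1-5ef54204a3da",
        "address": "fa:16:3e:39:34:dd",
        "network": {
            "id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4}],
            }],
        },
    }]

    def fixed_ips(vifs):
        # Hypothetical helper: flatten VIF -> subnet -> ip and keep fixed addresses.
        return [ip["address"]
                for vif in vifs
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"]

    print(fixed_ips(network_info))  # ['192.168.128.6']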
{{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.727694] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a526862-de42-4e24-8f30-70d5ffbacacd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.729324] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11ad3442-1b82-442d-ab73-6e7cb97976b4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.735722] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 865.735722] env[61972]: value = "task-1389239" [ 865.735722] env[61972]: _type = "Task" [ 865.735722] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.737109] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 865.737109] env[61972]: value = "task-1389241" [ 865.737109] env[61972]: _type = "Task" [ 865.737109] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.746188] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Created folder: Instances in parent group-v294850. [ 865.746481] env[61972]: DEBUG oslo.service.loopingcall [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.747009] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.747249] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d094b7fd-3bd9-4da6-8760-720c3734b13d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.770985] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389241, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.771265] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389239, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.775961] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.775961] env[61972]: value = "task-1389242" [ 865.775961] env[61972]: _type = "Task" [ 865.775961] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.792194] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389236, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50386} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.792386] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389242, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.793044] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 9562558a-89ba-4169-bd0a-ad31fc0c33bc/9562558a-89ba-4169-bd0a-ad31fc0c33bc.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.793279] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.793518] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2327dab1-ecd0-4894-a263-9f41b69139e3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.798081] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093518} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.798642] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 865.798835] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 865.799017] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 865.803384] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 865.803384] env[61972]: value = "task-1389243" [ 865.803384] env[61972]: _type = "Task" [ 865.803384] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.811978] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389243, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.034037] env[61972]: DEBUG nova.compute.manager [req-6cffbf63-a338-46bf-af86-4d2284a6c879 req-8ffb9e36-3bfe-44ae-afac-749d506483f9 service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Received event network-vif-plugged-3583e7ca-03b2-4200-8a2a-9394e6cec912 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 866.034278] env[61972]: DEBUG oslo_concurrency.lockutils [req-6cffbf63-a338-46bf-af86-4d2284a6c879 req-8ffb9e36-3bfe-44ae-afac-749d506483f9 service nova] Acquiring lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.034582] env[61972]: DEBUG oslo_concurrency.lockutils [req-6cffbf63-a338-46bf-af86-4d2284a6c879 req-8ffb9e36-3bfe-44ae-afac-749d506483f9 service nova] Lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.034645] env[61972]: DEBUG oslo_concurrency.lockutils [req-6cffbf63-a338-46bf-af86-4d2284a6c879 req-8ffb9e36-3bfe-44ae-afac-749d506483f9 service nova] Lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.034873] env[61972]: DEBUG nova.compute.manager [req-6cffbf63-a338-46bf-af86-4d2284a6c879 req-8ffb9e36-3bfe-44ae-afac-749d506483f9 service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] No waiting events found dispatching network-vif-plugged-3583e7ca-03b2-4200-8a2a-9394e6cec912 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 866.035095] env[61972]: WARNING nova.compute.manager [req-6cffbf63-a338-46bf-af86-4d2284a6c879 req-8ffb9e36-3bfe-44ae-afac-749d506483f9 service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Received unexpected event network-vif-plugged-3583e7ca-03b2-4200-8a2a-9394e6cec912 for instance with vm_state building and task_state spawning. [ 866.043760] env[61972]: DEBUG nova.network.neutron [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Updated VIF entry in instance network info cache for port db1b2713-6097-47ef-bec1-5ef54204a3da. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.044170] env[61972]: DEBUG nova.network.neutron [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Updating instance_info_cache with network_info: [{"id": "db1b2713-6097-47ef-bec1-5ef54204a3da", "address": "fa:16:3e:39:34:dd", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb1b2713-60", "ovs_interfaceid": "db1b2713-6097-47ef-bec1-5ef54204a3da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.148714] env[61972]: DEBUG nova.objects.instance [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'flavor' on Instance uuid 3d424523-b45d-4174-ac7a-08fd653e314f {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.169197] env[61972]: DEBUG nova.network.neutron [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Successfully updated port: 3583e7ca-03b2-4200-8a2a-9394e6cec912 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.252427] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389239, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.255668] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389241, 'name': Rename_Task, 'duration_secs': 0.139919} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.258138] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.258382] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79114727-98e4-4468-a95f-cb9aa0c272c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.264874] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 866.264874] env[61972]: value = "task-1389244" [ 866.264874] env[61972]: _type = "Task" [ 866.264874] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.274752] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.288245] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389242, 'name': CreateVM_Task} progress is 25%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.316828] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389243, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063217} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.317024] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.318341] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b4e05f-b8b7-4f23-9212-48d0b1285694 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.342866] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 9562558a-89ba-4169-bd0a-ad31fc0c33bc/9562558a-89ba-4169-bd0a-ad31fc0c33bc.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.346217] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe90c91d-7a40-4322-af8f-2190dfb3c82b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.367877] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 866.367877] env[61972]: value = "task-1389245" [ 866.367877] env[61972]: _type = "Task" [ 866.367877] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.377142] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389245, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.378781] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd868e9f-c6cc-49e6-a2ce-c6f6b46306a9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.386816] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50447e6-0a1b-4597-9c5e-e00eb28ce977 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.422411] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4743a3-0a52-4f86-b40f-0451fa842c16 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.428995] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2cbb82-cd64-4775-a030-d21efc3fea8e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.442592] env[61972]: DEBUG nova.compute.provider_tree [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.548048] env[61972]: DEBUG oslo_concurrency.lockutils [req-f777d386-8366-4c71-b846-f59696d4cddf req-7586ac60-fb5f-4427-a57a-7d8daeb92e1f service nova] Releasing lock "refresh_cache-942b00ba-a615-452d-a0c1-633d48d73fd4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.654465] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1414ffb9-1a7b-485b-a7a3-020ae4ffb9d3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.803s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.674795] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.674948] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.675134] env[61972]: DEBUG nova.network.neutron [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.747338] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc 
tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389239, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.775458] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389244, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.786080] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389242, 'name': CreateVM_Task, 'duration_secs': 0.831818} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.786272] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.787025] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.787261] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.787807] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.787974] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-191f3513-b5b0-4ec5-854b-522b68d2b8a5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.797284] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 866.797284] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a93a0e-7f02-3537-6b09-ff9f06480fb6" [ 866.797284] env[61972]: _type = "Task" [ 866.797284] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.806506] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a93a0e-7f02-3537-6b09-ff9f06480fb6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.838645] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 866.838820] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 866.839056] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.839211] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 866.839270] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.839443] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 866.839667] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 866.839909] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 866.839994] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 
tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 866.840240] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 866.840406] env[61972]: DEBUG nova.virt.hardware [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 866.841566] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a082594-cac6-406c-b970-9bf3507e1a35 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.850039] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d8820f-c9ad-4fd3-9856-500209ff17be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.863861] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.870262] env[61972]: DEBUG oslo.service.loopingcall [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.870573] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.873824] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-095d2088-76d8-4e1d-9cc4-7137f1fdd9c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.893023] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389245, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.894277] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.894277] env[61972]: value = "task-1389246" [ 866.894277] env[61972]: _type = "Task" [ 866.894277] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.901668] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389246, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.946618] env[61972]: DEBUG nova.scheduler.client.report [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 867.234048] env[61972]: DEBUG nova.network.neutron [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.248139] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389239, 'name': CreateSnapshot_Task, 'duration_secs': 1.27358} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.248448] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Created Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 867.250027] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b37a22c-d765-4e78-826d-61f4ef5f7686 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.277020] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389244, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.295097] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "3d424523-b45d-4174-ac7a-08fd653e314f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.296370] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.296370] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "3d424523-b45d-4174-ac7a-08fd653e314f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.296370] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.296370] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.298467] env[61972]: INFO nova.compute.manager [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Terminating instance [ 867.314765] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a93a0e-7f02-3537-6b09-ff9f06480fb6, 'name': SearchDatastore_Task, 'duration_secs': 0.008988} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.314765] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.315241] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.315475] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.315622] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.315798] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.316274] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b374f736-a147-418f-be50-c9d015ba42f7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.326221] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.326423] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.327187] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abb48f85-c6fc-4c1b-b988-767d04677402 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.333475] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 867.333475] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f26861-860e-6e76-2693-fae140fe0cea" [ 867.333475] env[61972]: _type = "Task" [ 867.333475] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.344189] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f26861-860e-6e76-2693-fae140fe0cea, 'name': SearchDatastore_Task, 'duration_secs': 0.008349} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.345488] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5ecbd9b-107d-4f40-8fda-7d1d68236bab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.350434] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 867.350434] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52722d61-4df9-1410-4e6b-eaa22e6c1023" [ 867.350434] env[61972]: _type = "Task" [ 867.350434] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.360966] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52722d61-4df9-1410-4e6b-eaa22e6c1023, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.381984] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389245, 'name': ReconfigVM_Task, 'duration_secs': 0.93803} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.382269] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 9562558a-89ba-4169-bd0a-ad31fc0c33bc/9562558a-89ba-4169-bd0a-ad31fc0c33bc.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.383056] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb23dd7e-81d5-4e9a-b9c8-f4c9aecb8d1a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.391585] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 867.391585] env[61972]: value = "task-1389247" [ 867.391585] env[61972]: _type = "Task" [ 867.391585] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.403325] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389247, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.407621] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389246, 'name': CreateVM_Task, 'duration_secs': 0.260397} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.407805] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.408333] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.408670] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.409026] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 867.409343] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef3138d4-ee6c-4491-bf5c-dce1ed9b99f8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.414254] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 867.414254] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ab5385-a42d-9c47-3494-5adea277904e" [ 867.414254] env[61972]: _type = "Task" [ 867.414254] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.426877] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ab5385-a42d-9c47-3494-5adea277904e, 'name': SearchDatastore_Task, 'duration_secs': 0.009508} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.427262] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.427530] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.427747] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.452542] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.358s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.456980] env[61972]: DEBUG nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 867.456980] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.142s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.466559] env[61972]: DEBUG nova.network.neutron [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Updating instance_info_cache with network_info: [{"id": "3583e7ca-03b2-4200-8a2a-9394e6cec912", "address": "fa:16:3e:03:c7:bb", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3583e7ca-03", "ovs_interfaceid": "3583e7ca-03b2-4200-8a2a-9394e6cec912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.768168] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Creating linked-clone VM from snapshot {{(pid=61972) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 867.768947] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d35c14cd-72e5-4201-8872-953ed7fbc3a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.782198] env[61972]: DEBUG oslo_vmware.api [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389244, 'name': PowerOnVM_Task, 'duration_secs': 1.189815} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.783490] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.783742] env[61972]: INFO nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Took 9.20 seconds to spawn the instance on the hypervisor. [ 867.783939] env[61972]: DEBUG nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 867.784286] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 867.784286] env[61972]: value = "task-1389248" [ 867.784286] env[61972]: _type = "Task" [ 867.784286] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.784960] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bb1ad4-7683-47f1-ac05-b65b8f9138e0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.797157] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389248, 'name': CloneVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.808015] env[61972]: DEBUG nova.compute.manager [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 867.810023] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 867.810023] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69d750ea-4933-4b2f-a19b-8158dffa9a9a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.815835] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 867.815835] env[61972]: value = "task-1389249" [ 867.815835] env[61972]: _type = "Task" [ 867.815835] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.825241] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.861982] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52722d61-4df9-1410-4e6b-eaa22e6c1023, 'name': SearchDatastore_Task, 'duration_secs': 0.008545} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.862350] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.862712] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 942b00ba-a615-452d-a0c1-633d48d73fd4/942b00ba-a615-452d-a0c1-633d48d73fd4.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.863059] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.863264] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.863922] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afde5b9b-933e-432a-9c29-af11cac1f4a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.865628] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df523b0f-81a4-4ecb-8054-baf06e3e0ec5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.871744] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 867.871744] env[61972]: value = "task-1389250" [ 867.871744] env[61972]: _type = "Task" [ 867.871744] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.876272] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.876479] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.877617] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa2d6de4-cc2c-485d-8bed-f95238b31685 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.883739] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389250, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.888018] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 867.888018] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f91bb9-97df-a4ac-cfa0-c1cf871b6314" [ 867.888018] env[61972]: _type = "Task" [ 867.888018] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.897865] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f91bb9-97df-a4ac-cfa0-c1cf871b6314, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.908343] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389247, 'name': Rename_Task, 'duration_secs': 0.1864} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.908899] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.909213] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72b5bcb8-34bd-44dc-a4ed-7b03ca19302a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.917440] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 867.917440] env[61972]: value = "task-1389251" [ 867.917440] env[61972]: _type = "Task" [ 867.917440] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.926926] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389251, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.959810] env[61972]: DEBUG nova.compute.utils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.969108] env[61972]: DEBUG nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 867.969378] env[61972]: DEBUG nova.network.neutron [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 867.972593] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.973166] env[61972]: DEBUG nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Instance network_info: |[{"id": "3583e7ca-03b2-4200-8a2a-9394e6cec912", "address": "fa:16:3e:03:c7:bb", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3583e7ca-03", "ovs_interfaceid": "3583e7ca-03b2-4200-8a2a-9394e6cec912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 867.973478] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:c7:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3583e7ca-03b2-4200-8a2a-9394e6cec912', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.982891] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating folder: Project (c57829399c5741c08c30bb60163148b3). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.984563] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cdfb187-b2d0-4d7a-bb50-de2a60d7dffa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.998201] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created folder: Project (c57829399c5741c08c30bb60163148b3) in parent group-v294799. [ 867.998201] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating folder: Instances. Parent ref: group-v294856. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.998748] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77d9cd22-00f5-43b1-bcad-21e19875163d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.014024] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created folder: Instances in parent group-v294856. [ 868.014024] env[61972]: DEBUG oslo.service.loopingcall [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 868.014024] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 868.014024] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6e7ef8a-437a-493b-9c67-9d7ce9d602a5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.040022] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 868.040022] env[61972]: value = "task-1389254" [ 868.040022] env[61972]: _type = "Task" [ 868.040022] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.048829] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389254, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.067233] env[61972]: DEBUG nova.policy [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b66ac3f5faaa4ea2b8dc27da9f13b684', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dddfa1d6702d444faf82e9e456f124f9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 868.073693] env[61972]: DEBUG nova.compute.manager [req-9e781307-0d9d-4696-97a8-5456cceda948 req-c3336094-d4c7-4d8a-aabe-0c2f9c3fa540 service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Received event network-changed-3583e7ca-03b2-4200-8a2a-9394e6cec912 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 868.073968] env[61972]: DEBUG nova.compute.manager [req-9e781307-0d9d-4696-97a8-5456cceda948 req-c3336094-d4c7-4d8a-aabe-0c2f9c3fa540 service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Refreshing instance network info cache due to event network-changed-3583e7ca-03b2-4200-8a2a-9394e6cec912. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 868.074278] env[61972]: DEBUG oslo_concurrency.lockutils [req-9e781307-0d9d-4696-97a8-5456cceda948 req-c3336094-d4c7-4d8a-aabe-0c2f9c3fa540 service nova] Acquiring lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.074520] env[61972]: DEBUG oslo_concurrency.lockutils [req-9e781307-0d9d-4696-97a8-5456cceda948 req-c3336094-d4c7-4d8a-aabe-0c2f9c3fa540 service nova] Acquired lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.074779] env[61972]: DEBUG nova.network.neutron [req-9e781307-0d9d-4696-97a8-5456cceda948 req-c3336094-d4c7-4d8a-aabe-0c2f9c3fa540 service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Refreshing network info cache for port 3583e7ca-03b2-4200-8a2a-9394e6cec912 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.301590] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389248, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.313133] env[61972]: INFO nova.compute.manager [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Took 19.79 seconds to build instance. 
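The recurring "Waiting for the task: (returnval){ ... } to complete" and "progress is N%" entries in this stretch come from a poll-until-done loop around vCenter tasks (CloneVM_Task, PowerOffVM_Task, CopyVirtualDisk_Task, CreateVM_Task above). The following is a minimal, self-contained Python sketch of that polling pattern only; it is not the oslo_vmware implementation, and fetch_task_state, the poll interval, and the timeout are illustrative assumptions:

import time

POLL_INTERVAL = 0.5  # seconds between polls; illustrative, not a real oslo_vmware default


def wait_for_task(fetch_task_state, timeout=300.0):
    """Poll a vCenter-style task until it finishes.

    fetch_task_state is a hypothetical callable returning a dict such as
    {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str}.
    Each loop iteration corresponds to one "progress is N%." line in the log.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_state()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError('task did not complete in time')

In the log above, each poll shows up as one "_poll_task ... progress is N%" entry, and the final poll reports "completed successfully" together with a duration_secs value.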
[ 868.328029] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389249, 'name': PowerOffVM_Task, 'duration_secs': 0.204199} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.328300] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.328514] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Volume detach. Driver type: vmdk {{(pid=61972) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 868.328706] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294843', 'volume_id': '766d047e-033a-4781-8bda-7ae1a40449cf', 'name': 'volume-766d047e-033a-4781-8bda-7ae1a40449cf', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3d424523-b45d-4174-ac7a-08fd653e314f', 'attached_at': '', 'detached_at': '', 'volume_id': '766d047e-033a-4781-8bda-7ae1a40449cf', 'serial': '766d047e-033a-4781-8bda-7ae1a40449cf'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 868.329541] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4083b94a-ef71-40a0-aa14-fb6f68d5d552 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.352389] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a80eafe-4a3e-438b-959a-f5ea5cefd83a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.361428] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13fc413-ad23-4382-ba3f-7db9003451bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.400053] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c5dc96-b3fa-4fa3-bf7c-52b5a8319417 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.411029] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389250, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.427887] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f91bb9-97df-a4ac-cfa0-c1cf871b6314, 'name': SearchDatastore_Task, 'duration_secs': 0.009138} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.427887] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] The volume has not been displaced from its original location: [datastore1] volume-766d047e-033a-4781-8bda-7ae1a40449cf/volume-766d047e-033a-4781-8bda-7ae1a40449cf.vmdk. No consolidation needed. {{(pid=61972) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 868.432948] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 868.437040] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38714b81-7539-4faa-a493-b0f58119a4b0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.451798] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8effc80b-3b0c-4f95-bfe0-b21ade250c54 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.463045] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389251, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.466116] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 868.466116] env[61972]: value = "task-1389255" [ 868.466116] env[61972]: _type = "Task" [ 868.466116] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.466542] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 868.466542] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]525d51fe-257b-ab38-fa05-c40f29dbaade" [ 868.466542] env[61972]: _type = "Task" [ 868.466542] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.473651] env[61972]: DEBUG nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 868.491170] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389255, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.491485] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525d51fe-257b-ab38-fa05-c40f29dbaade, 'name': SearchDatastore_Task, 'duration_secs': 0.008875} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.491728] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.492035] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 667aff7f-57d5-4133-934d-386602a866f8/667aff7f-57d5-4133-934d-386602a866f8.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.492335] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-072fbda2-a0e2-4894-bbee-5f9af7659c59 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.501027] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 868.501027] env[61972]: value = "task-1389256" [ 868.501027] env[61972]: _type = "Task" [ 868.501027] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.508877] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389256, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.509904] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 0cd09167-2c2f-4cad-b26d-35aa208fbf79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.510096] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance a77d41aa-13ba-4d26-b5fd-4928891948ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.510316] env[61972]: WARNING nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 89cbc6ec-7546-443c-9abb-47940d223daa is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 868.510485] env[61972]: WARNING nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance caad50a8-e0ad-4ca9-b391-691ead1756f0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 868.510616] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 3d424523-b45d-4174-ac7a-08fd653e314f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.510762] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance e2b6dd4e-b639-4553-a45f-87c155506ea3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.510866] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 94bd64b9-3d20-4631-baed-4500f9beb9c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.510972] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 667aff7f-57d5-4133-934d-386602a866f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.511091] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 21440243-458c-4640-b0ba-8f3b8b1b0720 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.511235] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 9562558a-89ba-4169-bd0a-ad31fc0c33bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.511353] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 942b00ba-a615-452d-a0c1-633d48d73fd4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.511464] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 72435dc4-eae1-4606-bb32-e7e8e282d0b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.511882] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance b9726bf4-a4b1-4b22-840f-98157d0d790c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.551626] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389254, 'name': CreateVM_Task, 'duration_secs': 0.474944} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.551883] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.552930] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.553156] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.553527] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.553991] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fb7143a-19f4-4479-b3aa-438de46a6162 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.560094] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 868.560094] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf1642-3b10-ee06-d8e8-dbec72a0dc71" [ 868.560094] env[61972]: _type = "Task" [ 868.560094] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.568997] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf1642-3b10-ee06-d8e8-dbec72a0dc71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.573131] env[61972]: DEBUG nova.network.neutron [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Successfully created port: 6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.799600] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389248, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.815667] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5286fd75-3bff-4a12-8b00-3ab5821e177d tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "21440243-458c-4640-b0ba-8f3b8b1b0720" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.112s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.838498] env[61972]: DEBUG nova.network.neutron [req-9e781307-0d9d-4696-97a8-5456cceda948 req-c3336094-d4c7-4d8a-aabe-0c2f9c3fa540 service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Updated VIF entry in instance network info cache for port 3583e7ca-03b2-4200-8a2a-9394e6cec912. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.838498] env[61972]: DEBUG nova.network.neutron [req-9e781307-0d9d-4696-97a8-5456cceda948 req-c3336094-d4c7-4d8a-aabe-0c2f9c3fa540 service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Updating instance_info_cache with network_info: [{"id": "3583e7ca-03b2-4200-8a2a-9394e6cec912", "address": "fa:16:3e:03:c7:bb", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3583e7ca-03", "ovs_interfaceid": "3583e7ca-03b2-4200-8a2a-9394e6cec912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.905323] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542745} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.909030] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 942b00ba-a615-452d-a0c1-633d48d73fd4/942b00ba-a615-452d-a0c1-633d48d73fd4.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.909030] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.909030] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74e64bf2-d2da-4818-800f-3fc1b808fdec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.915447] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 868.915447] env[61972]: value = "task-1389257" [ 868.915447] env[61972]: _type = "Task" [ 868.915447] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.927222] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389257, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.943783] env[61972]: DEBUG oslo_vmware.api [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389251, 'name': PowerOnVM_Task, 'duration_secs': 0.827017} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.943783] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.944631] env[61972]: INFO nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Took 8.01 seconds to spawn the instance on the hypervisor. 
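The "Acquiring lock" / "Acquired lock" / "Releasing lock" (and "Acquired external semaphore") entries around "[datastore1] devstack-image-cache_base/..." above show access to the cached image VMDK being serialized behind a named lock before it is copied to each instance directory. A minimal sketch of that named-lock pattern, assuming a purely in-process lock registry (the real locks can also be external, which this sketch does not attempt); the names and helper below are illustrative, not oslo.concurrency's API:

import threading
from collections import defaultdict
from contextlib import contextmanager

# Illustrative in-process registry of locks keyed by name, standing in for the
# named locks seen in the log (e.g. the devstack-image-cache_base VMDK path).
_locks = defaultdict(threading.Lock)


@contextmanager
def named_lock(name):
    lock = _locks[name]
    print(f'Acquiring lock "{name}"')
    lock.acquire()
    print(f'Lock "{name}" acquired')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released')


# Usage mirroring the image-cache serialization in the log: only one worker at
# a time copies the cached VMDK to an instance directory.
with named_lock("[datastore1] devstack-image-cache_base/<image-id>.vmdk"):
    pass  # the CopyVirtualDisk step would run here

The point of the pattern is visible in the surrounding records: the copy from the image cache to 942b00ba-... and to 667aff7f-... each happens only after the cache path lock is acquired, and the lock is released before the next worker proceeds.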
[ 868.944935] env[61972]: DEBUG nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 868.945912] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e201591b-4aa3-43b1-8256-b795cfd672c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.977578] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389255, 'name': ReconfigVM_Task, 'duration_secs': 0.205138} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.977972] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 868.987486] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e24ba59b-0e66-4777-b8ee-7b3370b6f532 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.005251] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 869.005251] env[61972]: value = "task-1389258" [ 869.005251] env[61972]: _type = "Task" [ 869.005251] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.008518] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389256, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490456} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.012402] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 667aff7f-57d5-4133-934d-386602a866f8/667aff7f-57d5-4133-934d-386602a866f8.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.012703] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.013259] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1455e944-374e-4f4a-ab1b-89ce3854c577 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.015441] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 9a0463a0-dc96-41b1-8415-22011644ac0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.021886] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389258, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.023484] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 869.023484] env[61972]: value = "task-1389259" [ 869.023484] env[61972]: _type = "Task" [ 869.023484] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.035033] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389259, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.071115] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf1642-3b10-ee06-d8e8-dbec72a0dc71, 'name': SearchDatastore_Task, 'duration_secs': 0.008939} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.071457] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.071685] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.071923] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.072086] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.072271] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.072540] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-310255c5-ecd2-4a42-a0f5-1d583c314e07 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.086550] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.086744] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.087511] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3234088-a7bb-420e-b15a-0390571ccf90 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.093240] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 869.093240] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fa598e-55b8-66c0-fd14-e603870e96d4" [ 869.093240] env[61972]: _type = "Task" [ 869.093240] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.101911] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fa598e-55b8-66c0-fd14-e603870e96d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.300385] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389248, 'name': CloneVM_Task, 'duration_secs': 1.329026} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.300701] env[61972]: INFO nova.virt.vmwareapi.vmops [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Created linked-clone VM from snapshot [ 869.301719] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275e048c-b8d6-42b8-9a1a-78db7f857c1f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.310082] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Uploading image 1955dd95-d842-4a9b-943b-cc10e94d7867 {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 869.340550] env[61972]: DEBUG oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 869.340550] env[61972]: value = "vm-294855" [ 869.340550] env[61972]: _type = "VirtualMachine" [ 869.340550] env[61972]: }. 
{{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 869.342022] env[61972]: DEBUG oslo_concurrency.lockutils [req-9e781307-0d9d-4696-97a8-5456cceda948 req-c3336094-d4c7-4d8a-aabe-0c2f9c3fa540 service nova] Releasing lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.342022] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c9aab185-11ad-41e0-94d7-eed47b905f20 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.349630] env[61972]: DEBUG oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lease: (returnval){ [ 869.349630] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]522b29e0-5725-c409-7a5c-e27a1e5bd6f0" [ 869.349630] env[61972]: _type = "HttpNfcLease" [ 869.349630] env[61972]: } obtained for exporting VM: (result){ [ 869.349630] env[61972]: value = "vm-294855" [ 869.349630] env[61972]: _type = "VirtualMachine" [ 869.349630] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 869.350180] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the lease: (returnval){ [ 869.350180] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]522b29e0-5725-c409-7a5c-e27a1e5bd6f0" [ 869.350180] env[61972]: _type = "HttpNfcLease" [ 869.350180] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 869.357507] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 869.357507] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]522b29e0-5725-c409-7a5c-e27a1e5bd6f0" [ 869.357507] env[61972]: _type = "HttpNfcLease" [ 869.357507] env[61972]: } is initializing. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 869.429576] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071348} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.430107] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.431605] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c394ec53-0bb7-4337-bb53-724fbc2c66e9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.483988] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 942b00ba-a615-452d-a0c1-633d48d73fd4/942b00ba-a615-452d-a0c1-633d48d73fd4.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.487390] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a977577-7c23-486d-be3b-2a662036aa42 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.515640] env[61972]: INFO nova.compute.manager [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Took 19.62 seconds to build instance. [ 869.518335] env[61972]: DEBUG nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 869.529999] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 489f1de0-d1c8-4429-a6f1-24ea885282f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.539027] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 869.539027] env[61972]: value = "task-1389261" [ 869.539027] env[61972]: _type = "Task" [ 869.539027] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.544564] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389258, 'name': ReconfigVM_Task, 'duration_secs': 0.534637} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.551396] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294843', 'volume_id': '766d047e-033a-4781-8bda-7ae1a40449cf', 'name': 'volume-766d047e-033a-4781-8bda-7ae1a40449cf', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3d424523-b45d-4174-ac7a-08fd653e314f', 'attached_at': '', 'detached_at': '', 'volume_id': '766d047e-033a-4781-8bda-7ae1a40449cf', 'serial': '766d047e-033a-4781-8bda-7ae1a40449cf'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 869.551396] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.551396] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389259, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.226824} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.552693] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0742ec-ea75-4197-b615-61f9d2a391ed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.557711] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.562349] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e71020b-debe-46b3-b86a-4d904175f1f7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.566790] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389261, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.584367] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 667aff7f-57d5-4133-934d-386602a866f8/667aff7f-57d5-4133-934d-386602a866f8.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.588697] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 869.588933] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 869.589109] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.589296] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 869.589445] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.589593] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 869.589807] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 869.589969] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 869.590155] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 869.590309] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 869.590480] env[61972]: DEBUG nova.virt.hardware [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 869.591143] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd3d8cb4-24ba-499f-8408-f4debaa81fcf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.608792] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.609706] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e065d983-e624-42f8-91b2-e4be22144c7d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.613160] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac14974f-17a4-48bf-bc10-f9f6b46de03b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.628588] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8131dc48-dcc9-4dff-8496-4b2c6cb0e3b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.632410] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 869.632410] env[61972]: value = "task-1389262" [ 869.632410] env[61972]: _type = "Task" [ 869.632410] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.632689] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fa598e-55b8-66c0-fd14-e603870e96d4, 'name': SearchDatastore_Task, 'duration_secs': 0.057223} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.636710] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12bb03d1-0574-4c74-aa30-80a33cc73cff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.650948] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 869.650948] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ae30ef-d217-f6ed-bb6e-b0e8812a6b2f" [ 869.650948] env[61972]: _type = "Task" [ 869.650948] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.654141] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389262, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.661694] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ae30ef-d217-f6ed-bb6e-b0e8812a6b2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.709107] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.709413] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.709685] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleting the datastore file [datastore1] 3d424523-b45d-4174-ac7a-08fd653e314f {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.710080] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebe2943b-aa42-4e44-991c-142d0330b9d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.717522] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 869.717522] env[61972]: value = "task-1389264" [ 869.717522] env[61972]: _type = "Task" [ 869.717522] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.725938] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389264, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.859574] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 869.859574] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]522b29e0-5725-c409-7a5c-e27a1e5bd6f0" [ 869.859574] env[61972]: _type = "HttpNfcLease" [ 869.859574] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 869.860141] env[61972]: DEBUG oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 869.860141] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]522b29e0-5725-c409-7a5c-e27a1e5bd6f0" [ 869.860141] env[61972]: _type = "HttpNfcLease" [ 869.860141] env[61972]: }. 
{{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 869.860693] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528c94c9-e39b-408a-9ca5-0d5dd2947974 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.869553] env[61972]: DEBUG oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523cea9d-257e-466e-c1c1-e9a24ee63adb/disk-0.vmdk from lease info. {{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 869.869765] env[61972]: DEBUG oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523cea9d-257e-466e-c1c1-e9a24ee63adb/disk-0.vmdk for reading. {{(pid=61972) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 870.022739] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4c8ac79b-f6df-4a87-96cc-f38414a4d563 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.736s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.037824] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 56e21cf4-4dbc-4f72-97c0-082dd689c046 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.039273] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5a02334d-745b-4273-8d1d-8cc4bd401401 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.050574] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389261, 'name': ReconfigVM_Task, 'duration_secs': 0.490569} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.053115] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 942b00ba-a615-452d-a0c1-633d48d73fd4/942b00ba-a615-452d-a0c1-633d48d73fd4.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.053115] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6676690-5070-45a9-8f4c-d45a27784a7a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.059933] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 870.059933] env[61972]: value = "task-1389265" [ 870.059933] env[61972]: _type = "Task" [ 870.059933] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.071020] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389265, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.107460] env[61972]: DEBUG nova.compute.manager [req-ee8640bf-e80c-4fa5-ae98-fa6b6796a8f2 req-15bbeb44-8994-42fc-b473-57bebea11fb5 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Received event network-changed-1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 870.107816] env[61972]: DEBUG nova.compute.manager [req-ee8640bf-e80c-4fa5-ae98-fa6b6796a8f2 req-15bbeb44-8994-42fc-b473-57bebea11fb5 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Refreshing instance network info cache due to event network-changed-1296b6ff-7e29-4bc6-8230-f6b7696702f8. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 870.107955] env[61972]: DEBUG oslo_concurrency.lockutils [req-ee8640bf-e80c-4fa5-ae98-fa6b6796a8f2 req-15bbeb44-8994-42fc-b473-57bebea11fb5 service nova] Acquiring lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.108135] env[61972]: DEBUG oslo_concurrency.lockutils [req-ee8640bf-e80c-4fa5-ae98-fa6b6796a8f2 req-15bbeb44-8994-42fc-b473-57bebea11fb5 service nova] Acquired lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.108345] env[61972]: DEBUG nova.network.neutron [req-ee8640bf-e80c-4fa5-ae98-fa6b6796a8f2 req-15bbeb44-8994-42fc-b473-57bebea11fb5 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Refreshing network info cache for port 1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.143331] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389262, 'name': ReconfigVM_Task, 'duration_secs': 0.316932} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.143602] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 667aff7f-57d5-4133-934d-386602a866f8/667aff7f-57d5-4133-934d-386602a866f8.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.144269] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a48d3f0-4d3c-46ad-9011-cfa20c1d600d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.151351] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 870.151351] env[61972]: value = "task-1389266" [ 870.151351] env[61972]: _type = "Task" [ 870.151351] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.162441] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389266, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.165974] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ae30ef-d217-f6ed-bb6e-b0e8812a6b2f, 'name': SearchDatastore_Task, 'duration_secs': 0.020958} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.166218] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.166481] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 72435dc4-eae1-4606-bb32-e7e8e282d0b9/72435dc4-eae1-4606-bb32-e7e8e282d0b9.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.166733] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2a14e9c-b492-4e0c-acbc-fbf91b42b20d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.173709] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 870.173709] env[61972]: value = "task-1389267" [ 870.173709] env[61972]: _type = "Task" [ 870.173709] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.182071] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389267, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.229622] env[61972]: DEBUG oslo_vmware.api [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156796} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.230135] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.230135] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 870.230279] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 870.230424] env[61972]: INFO nova.compute.manager [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Took 2.42 seconds to destroy the instance on the hypervisor. [ 870.230668] env[61972]: DEBUG oslo.service.loopingcall [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.230858] env[61972]: DEBUG nova.compute.manager [-] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 870.230952] env[61972]: DEBUG nova.network.neutron [-] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.547464] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.547464] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 870.547464] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 870.574871] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389265, 'name': Rename_Task, 'duration_secs': 0.135073} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.577139] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.577646] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b8c769e-7e10-4f98-80e8-ba6a8aeb902b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.586894] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 870.586894] env[61972]: value = "task-1389268" [ 870.586894] env[61972]: _type = "Task" [ 870.586894] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.606183] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389268, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.670205] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389266, 'name': Rename_Task, 'duration_secs': 0.13135} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.674597] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.678351] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee5a8bdc-4f12-4db3-b850-80e62de8d523 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.686399] env[61972]: DEBUG nova.compute.manager [req-c9a1abd6-95b1-4d9f-b915-4995db2779e4 req-0717d5e1-1076-470e-aa3a-fb0878ef21c0 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Received event network-vif-deleted-f16ae0e8-600f-41e1-b72f-f6adfad3ec9e {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 870.686399] env[61972]: INFO nova.compute.manager [req-c9a1abd6-95b1-4d9f-b915-4995db2779e4 req-0717d5e1-1076-470e-aa3a-fb0878ef21c0 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Neutron deleted interface f16ae0e8-600f-41e1-b72f-f6adfad3ec9e; detaching it from the instance and deleting it from the info cache [ 870.686641] env[61972]: DEBUG nova.network.neutron [req-c9a1abd6-95b1-4d9f-b915-4995db2779e4 req-0717d5e1-1076-470e-aa3a-fb0878ef21c0 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.696077] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389267, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519368} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.701177] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 72435dc4-eae1-4606-bb32-e7e8e282d0b9/72435dc4-eae1-4606-bb32-e7e8e282d0b9.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.701177] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.701177] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 870.701177] env[61972]: value = "task-1389269" [ 870.701177] env[61972]: _type = "Task" [ 870.701177] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.701798] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b7e9ced-a3e8-470b-831a-616b211a3468 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.718666] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 870.718666] env[61972]: value = "task-1389270" [ 870.718666] env[61972]: _type = "Task" [ 870.718666] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.719454] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389269, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.734472] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389270, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.901388] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9144977-91d1-46a0-a0e1-752e7bc35bd4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.910325] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5216a9-27d3-4f2f-8b03-038a06fff835 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.946285] env[61972]: DEBUG nova.network.neutron [req-ee8640bf-e80c-4fa5-ae98-fa6b6796a8f2 req-15bbeb44-8994-42fc-b473-57bebea11fb5 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updated VIF entry in instance network info cache for port 1296b6ff-7e29-4bc6-8230-f6b7696702f8. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.946791] env[61972]: DEBUG nova.network.neutron [req-ee8640bf-e80c-4fa5-ae98-fa6b6796a8f2 req-15bbeb44-8994-42fc-b473-57bebea11fb5 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.948779] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b83ca3a-bb2c-475c-a6f4-8ad7c25e1f91 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.961360] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4215eb8-5e72-47c8-a16e-cd33614a12fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.978954] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.055404] env[61972]: DEBUG nova.network.neutron [-] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.108545] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389268, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.140802] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.141210] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.192066] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c23088d5-c20d-44f5-be06-a14db8f5ec61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.207712] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28ce907-fdc6-4b7e-b104-2a0e8cee1d62 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.230266] env[61972]: DEBUG oslo_vmware.api [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389269, 'name': PowerOnVM_Task, 'duration_secs': 0.461391} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.231118] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.231378] env[61972]: DEBUG nova.compute.manager [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 871.232277] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6388a3af-4bef-4b1c-8572-ce5a456101dc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.251689] env[61972]: DEBUG nova.compute.manager [req-c9a1abd6-95b1-4d9f-b915-4995db2779e4 req-0717d5e1-1076-470e-aa3a-fb0878ef21c0 service nova] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Detach interface failed, port_id=f16ae0e8-600f-41e1-b72f-f6adfad3ec9e, reason: Instance 3d424523-b45d-4174-ac7a-08fd653e314f could not be found. 
{{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 871.252156] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389270, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067244} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.253159] env[61972]: DEBUG nova.network.neutron [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Successfully updated port: 6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 871.257516] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.257516] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b2c6c9-5819-4e81-b471-d728e18d0344 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.286740] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 72435dc4-eae1-4606-bb32-e7e8e282d0b9/72435dc4-eae1-4606-bb32-e7e8e282d0b9.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.288056] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6aed7c59-2770-4071-992f-14a8c2deb6ca {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.315870] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 871.315870] env[61972]: value = "task-1389271" [ 871.315870] env[61972]: _type = "Task" [ 871.315870] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.328104] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389271, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.455715] env[61972]: DEBUG oslo_concurrency.lockutils [req-ee8640bf-e80c-4fa5-ae98-fa6b6796a8f2 req-15bbeb44-8994-42fc-b473-57bebea11fb5 service nova] Releasing lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.482340] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 871.559151] env[61972]: INFO nova.compute.manager [-] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Took 1.33 seconds to deallocate network for instance. [ 871.605449] env[61972]: DEBUG oslo_vmware.api [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389268, 'name': PowerOnVM_Task, 'duration_secs': 0.709389} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.605775] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.605993] env[61972]: INFO nova.compute.manager [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Took 8.27 seconds to spawn the instance on the hypervisor. [ 871.606276] env[61972]: DEBUG nova.compute.manager [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 871.607162] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4acb90c-b653-4458-a5dd-8a73c98b6dfe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.645074] env[61972]: DEBUG nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 871.756408] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquiring lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.756558] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquired lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.756710] env[61972]: DEBUG nova.network.neutron [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 871.774289] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.828822] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389271, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.989059] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 871.989059] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.532s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.989479] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.865s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.989479] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.991535] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.636s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.993070] env[61972]: INFO nova.compute.claims [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.017033] env[61972]: INFO nova.scheduler.client.report [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Deleted allocations for instance 89cbc6ec-7546-443c-9abb-47940d223daa [ 872.107484] env[61972]: INFO nova.compute.manager [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Took 0.55 seconds to detach 1 volumes for instance. [ 872.123911] env[61972]: INFO nova.compute.manager [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Took 20.70 seconds to build instance. 
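The recurring "Waiting for the task ... to complete", "_poll_task ... progress is N%" and "Task ... completed successfully" records in this section all come from oslo.vmware's task polling helper. Below is a minimal illustrative sketch (not taken from this log or from Nova's source) of that call pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named `session` and a VirtualMachine managed object reference `vm_ref` obtained elsewhere; both names are hypothetical.

    # Sketch only: shows the invoke_api / wait_for_task pattern that produces the
    # "Waiting for the task" and "completed successfully" records seen above.
    from oslo_vmware import api  # `session` is assumed to be api.VMwareAPISession

    def power_on_and_wait(session, vm_ref):
        # Invoking a vSphere method such as PowerOnVM_Task returns a Task
        # managed object reference (logged above as value = "task-..."; _type = "Task").
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task state until it succeeds and returns the
        # task info; a failed task is raised as an oslo_vmware exception.
        return session.wait_for_task(task_ref)

The same wait loop backs the Rename_Task, ReconfigVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task records elsewhere in this section; only the invoked vSphere method differs.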
[ 872.134996] env[61972]: DEBUG nova.compute.manager [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Received event network-vif-plugged-6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 872.135261] env[61972]: DEBUG oslo_concurrency.lockutils [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] Acquiring lock "b9726bf4-a4b1-4b22-840f-98157d0d790c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.135477] env[61972]: DEBUG oslo_concurrency.lockutils [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] Lock "b9726bf4-a4b1-4b22-840f-98157d0d790c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.135653] env[61972]: DEBUG oslo_concurrency.lockutils [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] Lock "b9726bf4-a4b1-4b22-840f-98157d0d790c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.135824] env[61972]: DEBUG nova.compute.manager [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] No waiting events found dispatching network-vif-plugged-6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 872.136074] env[61972]: WARNING nova.compute.manager [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Received unexpected event network-vif-plugged-6348fdb6-1e04-4d45-b3d2-e67eb05449f7 for instance with vm_state building and task_state spawning. [ 872.136302] env[61972]: DEBUG nova.compute.manager [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Received event network-changed-6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 872.136479] env[61972]: DEBUG nova.compute.manager [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Refreshing instance network info cache due to event network-changed-6348fdb6-1e04-4d45-b3d2-e67eb05449f7. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 872.136660] env[61972]: DEBUG oslo_concurrency.lockutils [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] Acquiring lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.162319] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.194976] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "667aff7f-57d5-4133-934d-386602a866f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.195210] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "667aff7f-57d5-4133-934d-386602a866f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.195481] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "667aff7f-57d5-4133-934d-386602a866f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.195683] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "667aff7f-57d5-4133-934d-386602a866f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.195857] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "667aff7f-57d5-4133-934d-386602a866f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.198059] env[61972]: INFO nova.compute.manager [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Terminating instance [ 872.298831] env[61972]: DEBUG nova.network.neutron [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 
tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 872.329429] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389271, 'name': ReconfigVM_Task, 'duration_secs': 0.538339} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.329769] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 72435dc4-eae1-4606-bb32-e7e8e282d0b9/72435dc4-eae1-4606-bb32-e7e8e282d0b9.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.330577] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d70012a-ba33-43e5-a3f5-c66eb195ab75 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.336684] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 872.336684] env[61972]: value = "task-1389272" [ 872.336684] env[61972]: _type = "Task" [ 872.336684] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.344870] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389272, 'name': Rename_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.525483] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d66a7c1f-3323-4933-b6a6-30cd70c147bd tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "89cbc6ec-7546-443c-9abb-47940d223daa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.909s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.613754] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.629015] env[61972]: DEBUG oslo_concurrency.lockutils [None req-30e619b2-475b-4621-bd74-d040e18a3696 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "942b00ba-a615-452d-a0c1-633d48d73fd4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.863s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.683292] env[61972]: DEBUG nova.network.neutron [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Updating instance_info_cache with network_info: [{"id": "6348fdb6-1e04-4d45-b3d2-e67eb05449f7", "address": "fa:16:3e:ef:39:8c", "network": {"id": "d8afcb85-a62b-495d-aaca-f790f02686f9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-98749794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dddfa1d6702d444faf82e9e456f124f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6348fdb6-1e", "ovs_interfaceid": "6348fdb6-1e04-4d45-b3d2-e67eb05449f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.702439] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "refresh_cache-667aff7f-57d5-4133-934d-386602a866f8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.702630] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 
tempest-ServerShowV254Test-2085387316-project-member] Acquired lock "refresh_cache-667aff7f-57d5-4133-934d-386602a866f8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.702934] env[61972]: DEBUG nova.network.neutron [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.851165] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389272, 'name': Rename_Task, 'duration_secs': 0.214478} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.851556] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.851974] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-651e9013-dbea-4fb7-ab58-df21f26906be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.859826] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 872.859826] env[61972]: value = "task-1389273" [ 872.859826] env[61972]: _type = "Task" [ 872.859826] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.871371] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389273, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.185721] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Releasing lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.186070] env[61972]: DEBUG nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Instance network_info: |[{"id": "6348fdb6-1e04-4d45-b3d2-e67eb05449f7", "address": "fa:16:3e:ef:39:8c", "network": {"id": "d8afcb85-a62b-495d-aaca-f790f02686f9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-98749794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dddfa1d6702d444faf82e9e456f124f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6348fdb6-1e", "ovs_interfaceid": "6348fdb6-1e04-4d45-b3d2-e67eb05449f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 873.186642] env[61972]: DEBUG oslo_concurrency.lockutils [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] Acquired lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.186837] env[61972]: DEBUG nova.network.neutron [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Refreshing network info cache for port 6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 873.188109] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:39:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e839c46-1ae9-43b7-9518-8f18f48100dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6348fdb6-1e04-4d45-b3d2-e67eb05449f7', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.195581] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 
tempest-ServersTestFqdnHostnames-1802778164-project-member] Creating folder: Project (dddfa1d6702d444faf82e9e456f124f9). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 873.199104] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a2d3ac0-d68f-4ded-a027-a725dfa90563 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.211029] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Created folder: Project (dddfa1d6702d444faf82e9e456f124f9) in parent group-v294799. [ 873.211198] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Creating folder: Instances. Parent ref: group-v294859. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 873.214669] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c646bb7-0b05-442c-8edd-b8bb0b16eaae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.224184] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Created folder: Instances in parent group-v294859. [ 873.224480] env[61972]: DEBUG oslo.service.loopingcall [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 873.224609] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.227587] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97905453-d73e-45c0-ad54-142352d5c661 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.243373] env[61972]: DEBUG nova.network.neutron [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.249950] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.249950] env[61972]: value = "task-1389276" [ 873.249950] env[61972]: _type = "Task" [ 873.249950] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.262065] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389276, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.272919] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2ea55b-870e-4734-a944-9eb7cac753b4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.280256] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca230dac-000d-4a40-ad10-f26a303518ba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.313414] env[61972]: DEBUG nova.network.neutron [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.315431] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b8d49c-480f-4df8-9241-ce5a02644e2f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.330199] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6c485a-2aa7-466c-a12f-d6e5fae149b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.345687] env[61972]: DEBUG nova.compute.provider_tree [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.370262] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389273, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.760474] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389276, 'name': CreateVM_Task, 'duration_secs': 0.407628} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.760815] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 873.761628] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.762342] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.762342] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 873.762546] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c767763-ac60-4c01-b03f-11a8cf239079 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.767902] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 873.767902] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]522c082a-2777-6798-bc02-515e6fa251a2" [ 873.767902] env[61972]: _type = "Task" [ 873.767902] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.775935] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]522c082a-2777-6798-bc02-515e6fa251a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.821621] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Releasing lock "refresh_cache-667aff7f-57d5-4133-934d-386602a866f8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.822091] env[61972]: DEBUG nova.compute.manager [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 873.822325] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.823321] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e781d7-5d0d-4ff0-8c39-8feba99524c6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.829313] env[61972]: INFO nova.compute.manager [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Rebuilding instance [ 873.835736] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.836111] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f512d72e-20dc-420d-95ef-7f3f26f880ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.843448] env[61972]: DEBUG oslo_vmware.api [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 873.843448] env[61972]: value = "task-1389277" [ 873.843448] env[61972]: _type = "Task" [ 873.843448] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.850569] env[61972]: DEBUG nova.scheduler.client.report [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 873.862097] env[61972]: DEBUG oslo_vmware.api [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.870126] env[61972]: DEBUG oslo_vmware.api [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389273, 'name': PowerOnVM_Task, 'duration_secs': 0.645863} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.874054] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.874330] env[61972]: INFO nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Took 8.68 seconds to spawn the instance on the hypervisor. [ 873.874468] env[61972]: DEBUG nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 873.876068] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d5fad7-9b95-4ed4-921a-f6143d717c68 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.897170] env[61972]: DEBUG nova.compute.manager [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 873.898133] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07707e5-056e-4ac4-ac53-c7a3a771741d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.041181] env[61972]: DEBUG nova.network.neutron [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Updated VIF entry in instance network info cache for port 6348fdb6-1e04-4d45-b3d2-e67eb05449f7. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 874.041576] env[61972]: DEBUG nova.network.neutron [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Updating instance_info_cache with network_info: [{"id": "6348fdb6-1e04-4d45-b3d2-e67eb05449f7", "address": "fa:16:3e:ef:39:8c", "network": {"id": "d8afcb85-a62b-495d-aaca-f790f02686f9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-98749794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dddfa1d6702d444faf82e9e456f124f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6348fdb6-1e", "ovs_interfaceid": "6348fdb6-1e04-4d45-b3d2-e67eb05449f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.278594] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]522c082a-2777-6798-bc02-515e6fa251a2, 'name': SearchDatastore_Task, 'duration_secs': 0.013009} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.278594] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.279044] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.279044] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.279143] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.279330] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.279611] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-277a5bb2-4ccf-4b24-ab5d-24af94c9d377 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.289119] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.289333] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 874.290106] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6219b58c-4795-4270-9ae8-97024dcf5bb3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.295725] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 874.295725] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fd2727-14fd-6640-7573-4f43f51f63a1" [ 874.295725] env[61972]: _type = "Task" [ 874.295725] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.304090] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fd2727-14fd-6640-7573-4f43f51f63a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.353728] env[61972]: DEBUG oslo_vmware.api [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389277, 'name': PowerOffVM_Task, 'duration_secs': 0.129203} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.354296] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.354296] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.354460] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6d2dc37-f844-4991-b41f-71bc12acc828 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.357934] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.358441] env[61972]: DEBUG nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 874.360909] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.994s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.361159] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.363127] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.018s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.364767] env[61972]: INFO nova.compute.claims [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 874.381438] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.381711] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.382349] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Deleting the datastore file [datastore1] 667aff7f-57d5-4133-934d-386602a866f8 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.382349] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9aa241cb-477d-408d-87fb-b14656a800f9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.394438] env[61972]: DEBUG oslo_vmware.api [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for the task: (returnval){ [ 874.394438] env[61972]: value = "task-1389279" [ 874.394438] env[61972]: _type = "Task" [ 874.394438] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.395393] env[61972]: INFO nova.scheduler.client.report [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Deleted allocations for instance caad50a8-e0ad-4ca9-b391-691ead1756f0 [ 874.402199] env[61972]: INFO nova.compute.manager [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Took 22.62 seconds to build instance. [ 874.414067] env[61972]: DEBUG oslo_vmware.api [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389279, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.544935] env[61972]: DEBUG oslo_concurrency.lockutils [req-17b53d30-02a7-4578-860f-57b9264cbeaa req-89645121-63da-4ecd-a489-fac0b97f99d8 service nova] Releasing lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.807220] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fd2727-14fd-6640-7573-4f43f51f63a1, 'name': SearchDatastore_Task, 'duration_secs': 0.021135} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.807984] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-940b58e8-f74a-4828-98da-435b6c314682 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.814420] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 874.814420] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b0e628-be11-f3ba-5224-c2cbda3b26ae" [ 874.814420] env[61972]: _type = "Task" [ 874.814420] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.824541] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b0e628-be11-f3ba-5224-c2cbda3b26ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.869195] env[61972]: DEBUG nova.compute.utils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 874.872848] env[61972]: DEBUG nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 874.873133] env[61972]: DEBUG nova.network.neutron [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 874.907330] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fff22241-6e07-4a51-ae21-6db52e0502aa tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.540s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.909615] env[61972]: DEBUG oslo_vmware.api [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Task: {'id': task-1389279, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222046} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.909859] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.910446] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 874.910701] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 874.910965] env[61972]: INFO nova.compute.manager [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Took 1.09 seconds to destroy the instance on the hypervisor. 
[ 874.911167] env[61972]: DEBUG oslo.service.loopingcall [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 874.911596] env[61972]: DEBUG oslo_concurrency.lockutils [None req-95efe1de-25ff-4a6a-b61b-ee03535b1d0b tempest-MultipleCreateTestJSON-1937603900 tempest-MultipleCreateTestJSON-1937603900-project-member] Lock "caad50a8-e0ad-4ca9-b391-691ead1756f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.212s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.914431] env[61972]: DEBUG nova.compute.manager [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 874.914431] env[61972]: DEBUG nova.network.neutron [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 874.915609] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.915609] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c2a1c2c-ee0a-4e4e-a04c-b5c56616659f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.924562] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 874.924562] env[61972]: value = "task-1389280" [ 874.924562] env[61972]: _type = "Task" [ 874.924562] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.933235] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389280, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.938585] env[61972]: DEBUG nova.network.neutron [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.970330] env[61972]: DEBUG nova.policy [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fe11d10fde54adb9abf00a62e7b0bb7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ca7e7c695254785a21c5e7bc01e9851', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 875.325693] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b0e628-be11-f3ba-5224-c2cbda3b26ae, 'name': SearchDatastore_Task, 'duration_secs': 0.018461} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.326031] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.326407] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] b9726bf4-a4b1-4b22-840f-98157d0d790c/b9726bf4-a4b1-4b22-840f-98157d0d790c.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 875.326665] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06d5436d-157f-4022-aafa-d378a80340c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.335189] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 875.335189] env[61972]: value = "task-1389281" [ 875.335189] env[61972]: _type = "Task" [ 875.335189] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.345776] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389281, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.375405] env[61972]: DEBUG nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 875.421352] env[61972]: DEBUG nova.network.neutron [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Successfully created port: 7c33b0c3-d8f5-4df1-8f8a-62ae44204b61 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 875.439329] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389280, 'name': PowerOffVM_Task, 'duration_secs': 0.28545} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.439629] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.439878] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 875.440895] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d0d297-2ef7-4ce4-8447-6ce886cdcf7b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.443586] env[61972]: DEBUG nova.network.neutron [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.451248] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.451516] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f97ae522-674f-4f73-af09-651cf6a15353 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.675145] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3143aa05-50d2-4118-98bd-d43c514080db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.687154] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-db947213-6021-46ae-8c3b-a993adaa83f2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.731081] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b35ba4-28cc-4037-85d7-73c53399fb44 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.734885] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.735061] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.738019] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleting the datastore file [datastore1] 942b00ba-a615-452d-a0c1-633d48d73fd4 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.738019] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ceaaef8a-9d51-4df6-97fc-8d5295ee3b78 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.744551] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e9f19a-542e-4dfc-a4a5-3c8ac07d0ca1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.748975] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 875.748975] env[61972]: value = "task-1389283" [ 875.748975] env[61972]: _type = "Task" [ 875.748975] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.764402] env[61972]: DEBUG nova.compute.provider_tree [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.773293] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389283, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.847728] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389281, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.946172] env[61972]: INFO nova.compute.manager [-] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Took 1.03 seconds to deallocate network for instance. [ 876.260251] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259658} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.260668] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.260934] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 876.261551] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 876.268311] env[61972]: DEBUG nova.scheduler.client.report [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 876.347973] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389281, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581239} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.348317] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] b9726bf4-a4b1-4b22-840f-98157d0d790c/b9726bf4-a4b1-4b22-840f-98157d0d790c.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 876.349016] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 876.349016] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c62180d9-adae-4b3e-9fbf-63348ddb62f5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.356575] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 876.356575] env[61972]: value = "task-1389284" [ 876.356575] env[61972]: _type = "Task" [ 876.356575] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.366732] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.392845] env[61972]: DEBUG nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 876.430586] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 876.431093] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 876.431333] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.431601] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 876.431823] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.432075] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 876.432376] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 876.432579] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 876.432864] env[61972]: DEBUG 
nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 876.433091] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 876.433348] env[61972]: DEBUG nova.virt.hardware [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 876.434673] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8956bd-211d-4800-aaeb-9719b75834ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.443831] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcd01ee-84d4-4d09-9c57-557daaf99654 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.460748] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.551054] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "a4e65047-a892-4f18-8a14-0f5de25ce235" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.551329] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "a4e65047-a892-4f18-8a14-0f5de25ce235" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.773310] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.773934] env[61972]: DEBUG nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Start building 
networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 876.779025] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.873s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.779025] env[61972]: INFO nova.compute.claims [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.869497] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078264} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.870103] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 876.871908] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f364835a-6b8f-4289-9be6-5c781d8b4292 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.902524] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] b9726bf4-a4b1-4b22-840f-98157d0d790c/b9726bf4-a4b1-4b22-840f-98157d0d790c.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 876.903346] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d92d8414-8fcc-4b32-9a59-d12c8d8d2aa5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.925034] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 876.925034] env[61972]: value = "task-1389285" [ 876.925034] env[61972]: _type = "Task" [ 876.925034] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.933886] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389285, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.054238] env[61972]: DEBUG nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 877.282156] env[61972]: DEBUG nova.compute.utils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.289684] env[61972]: DEBUG nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 877.289871] env[61972]: DEBUG nova.network.neutron [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 877.312516] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.312516] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.312516] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.312824] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.312824] env[61972]: DEBUG nova.virt.hardware [None 
req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.313245] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.313660] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.314070] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.314388] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.314679] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.316624] env[61972]: DEBUG nova.virt.hardware [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.316624] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab99c59-103c-4265-9223-c21354a88e85 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.328719] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839e607e-9106-421a-8d42-517c6142a98a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.337353] env[61972]: DEBUG nova.compute.manager [req-da25941a-e17f-4b9d-bf53-1bac8811323c req-a48d522e-8e0f-4eef-81b2-e0a06b83cd91 service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Received event network-vif-plugged-7c33b0c3-d8f5-4df1-8f8a-62ae44204b61 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 877.337811] env[61972]: DEBUG oslo_concurrency.lockutils [req-da25941a-e17f-4b9d-bf53-1bac8811323c req-a48d522e-8e0f-4eef-81b2-e0a06b83cd91 service nova] Acquiring lock "9a0463a0-dc96-41b1-8415-22011644ac0d-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.337811] env[61972]: DEBUG oslo_concurrency.lockutils [req-da25941a-e17f-4b9d-bf53-1bac8811323c req-a48d522e-8e0f-4eef-81b2-e0a06b83cd91 service nova] Lock "9a0463a0-dc96-41b1-8415-22011644ac0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.337944] env[61972]: DEBUG oslo_concurrency.lockutils [req-da25941a-e17f-4b9d-bf53-1bac8811323c req-a48d522e-8e0f-4eef-81b2-e0a06b83cd91 service nova] Lock "9a0463a0-dc96-41b1-8415-22011644ac0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.338137] env[61972]: DEBUG nova.compute.manager [req-da25941a-e17f-4b9d-bf53-1bac8811323c req-a48d522e-8e0f-4eef-81b2-e0a06b83cd91 service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] No waiting events found dispatching network-vif-plugged-7c33b0c3-d8f5-4df1-8f8a-62ae44204b61 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 877.338343] env[61972]: WARNING nova.compute.manager [req-da25941a-e17f-4b9d-bf53-1bac8811323c req-a48d522e-8e0f-4eef-81b2-e0a06b83cd91 service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Received unexpected event network-vif-plugged-7c33b0c3-d8f5-4df1-8f8a-62ae44204b61 for instance with vm_state building and task_state spawning. [ 877.350172] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:34:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db1b2713-6097-47ef-bec1-5ef54204a3da', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.358894] env[61972]: DEBUG oslo.service.loopingcall [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.360432] env[61972]: DEBUG nova.policy [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e79087f1765a41fc9712d57511aec36b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3b0ee0962334e68be3f7639d6fc559d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 877.362396] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.362645] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba5be26b-5bfa-40a9-b010-53c91ef26bef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.391885] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.391885] env[61972]: value = "task-1389286" [ 877.391885] env[61972]: _type = "Task" [ 877.391885] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.400643] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389286, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.438044] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389285, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.468764] env[61972]: DEBUG nova.network.neutron [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Successfully updated port: 7c33b0c3-d8f5-4df1-8f8a-62ae44204b61 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.584979] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.708641] env[61972]: DEBUG nova.network.neutron [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Successfully created port: 08285af7-59f1-4c6a-acd6-ef8aa2fd1506 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.796453] env[61972]: DEBUG nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 877.904140] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389286, 'name': CreateVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.938886] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389285, 'name': ReconfigVM_Task, 'duration_secs': 0.605632} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.939298] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Reconfigured VM instance instance-0000004d to attach disk [datastore2] b9726bf4-a4b1-4b22-840f-98157d0d790c/b9726bf4-a4b1-4b22-840f-98157d0d790c.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 877.942705] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-042cffcd-8181-4936-b46f-f80edc891f06 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.950080] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 877.950080] env[61972]: value = "task-1389287" [ 877.950080] env[61972]: _type = "Task" [ 877.950080] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.961759] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389287, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.975849] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquiring lock "refresh_cache-9a0463a0-dc96-41b1-8415-22011644ac0d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.976331] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquired lock "refresh_cache-9a0463a0-dc96-41b1-8415-22011644ac0d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.979026] env[61972]: DEBUG nova.network.neutron [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.053024] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70d04f6-29af-4020-86b7-f9cc89ba97e7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.062247] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8237415-cf74-4c9e-91d6-f29af6b13edc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.096262] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42de428-02f7-471c-9bbd-eb22e0070691 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.105299] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7426e9d7-c89e-4b02-8e57-6ef5285e4083 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.120794] env[61972]: DEBUG nova.compute.provider_tree [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.407816] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389286, 'name': CreateVM_Task, 'duration_secs': 0.626442} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.408128] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.408865] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.409079] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.409400] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 878.409788] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18675a87-e807-455d-9cd6-e086381f61e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.416193] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 878.416193] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]525424ef-1255-5588-e38a-4c01dac29911" [ 878.416193] env[61972]: _type = "Task" [ 878.416193] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.424517] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525424ef-1255-5588-e38a-4c01dac29911, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.460531] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389287, 'name': Rename_Task, 'duration_secs': 0.22452} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.460805] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.461100] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be0c70b5-5147-4e54-9c4c-bf9b46d4f989 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.469025] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 878.469025] env[61972]: value = "task-1389288" [ 878.469025] env[61972]: _type = "Task" [ 878.469025] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.476939] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389288, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.525587] env[61972]: DEBUG nova.network.neutron [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.624314] env[61972]: DEBUG nova.scheduler.client.report [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 878.760992] env[61972]: DEBUG nova.network.neutron [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Updating instance_info_cache with network_info: [{"id": "7c33b0c3-d8f5-4df1-8f8a-62ae44204b61", "address": "fa:16:3e:d3:22:4a", "network": {"id": "7e3a6248-91b2-45c3-9eb6-d4e8364ffc26", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-522872650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ca7e7c695254785a21c5e7bc01e9851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c33b0c3-d8", "ovs_interfaceid": "7c33b0c3-d8f5-4df1-8f8a-62ae44204b61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.806456] env[61972]: DEBUG nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 878.829840] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 878.830138] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 878.830306] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.830509] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 878.830731] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.830895] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 878.831152] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 878.831334] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 878.831540] env[61972]: DEBUG nova.virt.hardware [None 
req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 878.831716] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 878.831894] env[61972]: DEBUG nova.virt.hardware [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 878.832798] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7a8937-0414-487c-83c6-6d7869d6fe06 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.840895] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5913113d-56fd-4fb0-88e4-e2ed572e8500 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.926502] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525424ef-1255-5588-e38a-4c01dac29911, 'name': SearchDatastore_Task, 'duration_secs': 0.010718} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.926717] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.926870] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.927126] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.927281] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.927602] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.927941] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43ba09c1-7f58-4045-9ec8-d076ed1f2791 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.938172] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.938515] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.939668] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec337e8a-3ec0-4025-aa94-bd796c9de6cf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.945487] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 878.945487] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5279e408-413f-7f50-f596-eaf9a4e98155" [ 878.945487] env[61972]: _type = "Task" [ 878.945487] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.955753] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5279e408-413f-7f50-f596-eaf9a4e98155, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.978321] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389288, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.131743] env[61972]: DEBUG nova.compute.manager [req-8fedb16a-7556-4d01-88f3-cf072e2f2806 req-4092ff06-bf1d-4a43-8faa-f0fcf40bcdec service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Received event network-vif-plugged-08285af7-59f1-4c6a-acd6-ef8aa2fd1506 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 879.131989] env[61972]: DEBUG oslo_concurrency.lockutils [req-8fedb16a-7556-4d01-88f3-cf072e2f2806 req-4092ff06-bf1d-4a43-8faa-f0fcf40bcdec service nova] Acquiring lock "489f1de0-d1c8-4429-a6f1-24ea885282f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.132239] env[61972]: DEBUG oslo_concurrency.lockutils [req-8fedb16a-7556-4d01-88f3-cf072e2f2806 req-4092ff06-bf1d-4a43-8faa-f0fcf40bcdec service nova] Lock "489f1de0-d1c8-4429-a6f1-24ea885282f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.132457] env[61972]: DEBUG oslo_concurrency.lockutils [req-8fedb16a-7556-4d01-88f3-cf072e2f2806 req-4092ff06-bf1d-4a43-8faa-f0fcf40bcdec service nova] Lock "489f1de0-d1c8-4429-a6f1-24ea885282f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.132602] env[61972]: DEBUG nova.compute.manager [req-8fedb16a-7556-4d01-88f3-cf072e2f2806 req-4092ff06-bf1d-4a43-8faa-f0fcf40bcdec service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] No waiting events found dispatching 
network-vif-plugged-08285af7-59f1-4c6a-acd6-ef8aa2fd1506 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 879.132810] env[61972]: WARNING nova.compute.manager [req-8fedb16a-7556-4d01-88f3-cf072e2f2806 req-4092ff06-bf1d-4a43-8faa-f0fcf40bcdec service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Received unexpected event network-vif-plugged-08285af7-59f1-4c6a-acd6-ef8aa2fd1506 for instance with vm_state building and task_state spawning. [ 879.133591] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.134073] env[61972]: DEBUG nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 879.141295] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.505s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.141295] env[61972]: INFO nova.compute.claims [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 879.251358] env[61972]: DEBUG nova.network.neutron [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Successfully updated port: 08285af7-59f1-4c6a-acd6-ef8aa2fd1506 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.264084] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Releasing lock "refresh_cache-9a0463a0-dc96-41b1-8415-22011644ac0d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.264778] env[61972]: DEBUG nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Instance network_info: |[{"id": "7c33b0c3-d8f5-4df1-8f8a-62ae44204b61", "address": "fa:16:3e:d3:22:4a", "network": {"id": "7e3a6248-91b2-45c3-9eb6-d4e8364ffc26", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-522872650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ca7e7c695254785a21c5e7bc01e9851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c33b0c3-d8", "ovs_interfaceid": "7c33b0c3-d8f5-4df1-8f8a-62ae44204b61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 879.265642] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:22:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c33b0c3-d8f5-4df1-8f8a-62ae44204b61', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.275465] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Creating folder: Project (2ca7e7c695254785a21c5e7bc01e9851). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 879.276406] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01cead07-8c2f-49ac-9b8c-e9a59acc5d4a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.291991] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Created folder: Project (2ca7e7c695254785a21c5e7bc01e9851) in parent group-v294799. [ 879.292252] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Creating folder: Instances. Parent ref: group-v294863. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 879.292896] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a32c5853-0f55-4463-85c1-a5d7b7e32ed9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.303851] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Created folder: Instances in parent group-v294863. [ 879.304309] env[61972]: DEBUG oslo.service.loopingcall [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.304424] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 879.304683] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8740c14-9945-40f9-97b7-55b04bb81db1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.328809] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.328809] env[61972]: value = "task-1389291" [ 879.328809] env[61972]: _type = "Task" [ 879.328809] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.337696] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389291, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.361350] env[61972]: DEBUG nova.compute.manager [req-63a6d22a-f9aa-447e-a30a-26348212e15f req-cf9377c8-572e-4491-b67d-2991c09b84e2 service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Received event network-changed-7c33b0c3-d8f5-4df1-8f8a-62ae44204b61 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 879.361575] env[61972]: DEBUG nova.compute.manager [req-63a6d22a-f9aa-447e-a30a-26348212e15f req-cf9377c8-572e-4491-b67d-2991c09b84e2 service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Refreshing instance network info cache due to event network-changed-7c33b0c3-d8f5-4df1-8f8a-62ae44204b61. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 879.361800] env[61972]: DEBUG oslo_concurrency.lockutils [req-63a6d22a-f9aa-447e-a30a-26348212e15f req-cf9377c8-572e-4491-b67d-2991c09b84e2 service nova] Acquiring lock "refresh_cache-9a0463a0-dc96-41b1-8415-22011644ac0d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.361980] env[61972]: DEBUG oslo_concurrency.lockutils [req-63a6d22a-f9aa-447e-a30a-26348212e15f req-cf9377c8-572e-4491-b67d-2991c09b84e2 service nova] Acquired lock "refresh_cache-9a0463a0-dc96-41b1-8415-22011644ac0d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.362125] env[61972]: DEBUG nova.network.neutron [req-63a6d22a-f9aa-447e-a30a-26348212e15f req-cf9377c8-572e-4491-b67d-2991c09b84e2 service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Refreshing network info cache for port 7c33b0c3-d8f5-4df1-8f8a-62ae44204b61 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.418617] env[61972]: DEBUG oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523cea9d-257e-466e-c1c1-e9a24ee63adb/disk-0.vmdk. 
{{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 879.419755] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb17971-0f6e-4ec4-956b-50b4119224b8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.427324] env[61972]: DEBUG oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523cea9d-257e-466e-c1c1-e9a24ee63adb/disk-0.vmdk is in state: ready. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 879.427515] env[61972]: ERROR oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523cea9d-257e-466e-c1c1-e9a24ee63adb/disk-0.vmdk due to incomplete transfer. [ 879.427758] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9777f73c-031b-4b6b-8736-6b4a646a20d3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.435483] env[61972]: DEBUG oslo_vmware.rw_handles [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523cea9d-257e-466e-c1c1-e9a24ee63adb/disk-0.vmdk. {{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 879.435694] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Uploaded image 1955dd95-d842-4a9b-943b-cc10e94d7867 to the Glance image server {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 879.437919] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Destroying the VM {{(pid=61972) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 879.438635] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-19055984-466f-402a-8460-471824064a56 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.446214] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 879.446214] env[61972]: value = "task-1389292" [ 879.446214] env[61972]: _type = "Task" [ 879.446214] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.462061] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5279e408-413f-7f50-f596-eaf9a4e98155, 'name': SearchDatastore_Task, 'duration_secs': 0.010349} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.462061] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389292, 'name': Destroy_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.462928] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-816a16ef-d385-489e-96a6-1c37f00d273a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.469873] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 879.469873] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c99df9-a49b-f4bf-6269-0b3a7da93321" [ 879.469873] env[61972]: _type = "Task" [ 879.469873] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.486704] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c99df9-a49b-f4bf-6269-0b3a7da93321, 'name': SearchDatastore_Task, 'duration_secs': 0.011322} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.487302] env[61972]: DEBUG oslo_vmware.api [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389288, 'name': PowerOnVM_Task, 'duration_secs': 0.713657} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.487302] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.487522] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 942b00ba-a615-452d-a0c1-633d48d73fd4/942b00ba-a615-452d-a0c1-633d48d73fd4.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 879.487804] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.488038] env[61972]: INFO nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Took 9.97 seconds to spawn the instance on the hypervisor. [ 879.488228] env[61972]: DEBUG nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 879.488649] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbe87428-56b4-408a-bc37-a5efbbee0424 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.491416] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367511ae-3b5d-45ef-a65b-3d594d33c2df {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.503578] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 879.503578] env[61972]: value = "task-1389293" [ 879.503578] env[61972]: _type = "Task" [ 879.503578] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.512820] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389293, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.639926] env[61972]: DEBUG nova.compute.utils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 879.641756] env[61972]: DEBUG nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 879.641963] env[61972]: DEBUG nova.network.neutron [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.706892] env[61972]: DEBUG nova.policy [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3254aac3e99d474e95798cb85f2bf5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9266fa0d01664ba4a80ff4068cb9b9bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 879.754606] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquiring lock "refresh_cache-489f1de0-d1c8-4429-a6f1-24ea885282f3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.754806] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquired lock "refresh_cache-489f1de0-d1c8-4429-a6f1-24ea885282f3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.755098] env[61972]: DEBUG nova.network.neutron [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.841952] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389291, 'name': CreateVM_Task, 'duration_secs': 0.391787} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.842235] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 879.843127] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.843375] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.843769] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 879.844122] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f745496b-960c-4e74-8527-2513204f9b8d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.850268] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 879.850268] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cd7ae4-b17c-86f5-002c-5eeff422b992" [ 879.850268] env[61972]: _type = "Task" [ 879.850268] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.860385] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cd7ae4-b17c-86f5-002c-5eeff422b992, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.959779] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389292, 'name': Destroy_Task, 'duration_secs': 0.464196} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.960085] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Destroyed the VM [ 879.960998] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Deleting Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 879.960998] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-505b5814-9fd8-481e-9b41-59f844e8a369 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.968975] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 879.968975] env[61972]: value = "task-1389294" [ 879.968975] env[61972]: _type = "Task" [ 879.968975] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.979250] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389294, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.019429] env[61972]: INFO nova.compute.manager [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Took 27.17 seconds to build instance. [ 880.026658] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389293, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.091083] env[61972]: DEBUG nova.network.neutron [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Successfully created port: 89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.145846] env[61972]: DEBUG nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 880.185824] env[61972]: DEBUG nova.network.neutron [req-63a6d22a-f9aa-447e-a30a-26348212e15f req-cf9377c8-572e-4491-b67d-2991c09b84e2 service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Updated VIF entry in instance network info cache for port 7c33b0c3-d8f5-4df1-8f8a-62ae44204b61. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.185824] env[61972]: DEBUG nova.network.neutron [req-63a6d22a-f9aa-447e-a30a-26348212e15f req-cf9377c8-572e-4491-b67d-2991c09b84e2 service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Updating instance_info_cache with network_info: [{"id": "7c33b0c3-d8f5-4df1-8f8a-62ae44204b61", "address": "fa:16:3e:d3:22:4a", "network": {"id": "7e3a6248-91b2-45c3-9eb6-d4e8364ffc26", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-522872650-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ca7e7c695254785a21c5e7bc01e9851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c33b0c3-d8", "ovs_interfaceid": "7c33b0c3-d8f5-4df1-8f8a-62ae44204b61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.307022] env[61972]: DEBUG nova.network.neutron [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.360811] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cd7ae4-b17c-86f5-002c-5eeff422b992, 'name': SearchDatastore_Task, 'duration_secs': 0.055676} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.361142] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.361386] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.361614] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.361763] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.361945] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.362254] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d55aa81-b629-45a0-8772-67ce2af0c716 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.374342] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.374593] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.375278] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78f2b4ea-fe71-47c1-bae6-d05fa0a50faa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.386608] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 880.386608] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e1491d-158e-9f3f-e440-52182a10720c" [ 880.386608] env[61972]: _type = "Task" [ 880.386608] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.404120] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e1491d-158e-9f3f-e440-52182a10720c, 'name': SearchDatastore_Task, 'duration_secs': 0.010082} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.404954] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-440a9740-9533-4125-ab12-a142f647fc11 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.412341] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 880.412341] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e268e1-81a4-a649-02e8-b7849d0fe0fe" [ 880.412341] env[61972]: _type = "Task" [ 880.412341] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.425166] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e268e1-81a4-a649-02e8-b7849d0fe0fe, 'name': SearchDatastore_Task, 'duration_secs': 0.010226} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.425524] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.425712] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 9a0463a0-dc96-41b1-8415-22011644ac0d/9a0463a0-dc96-41b1-8415-22011644ac0d.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.426118] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-932516cc-e3f2-4ba8-8c14-f8e78776beda {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.434588] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 880.434588] env[61972]: value = "task-1389295" [ 880.434588] env[61972]: _type = "Task" [ 880.434588] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.446063] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389295, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.446777] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ec92db-d489-4c42-9ca2-30c0a309cf41 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.453839] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7600e9c7-d830-4a72-92af-a8e74f9f0036 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.497942] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa594e2-967e-4302-92de-a8b22c603871 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.509232] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389294, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.511035] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5cc3df-5a15-4c2e-814a-d4d7e0048782 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.528954] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042d1285-b752-4eaa-bfdb-469d92444769 tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "b9726bf4-a4b1-4b22-840f-98157d0d790c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.707s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.531372] env[61972]: DEBUG nova.compute.provider_tree [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.536021] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389293, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551686} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.537662] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 942b00ba-a615-452d-a0c1-633d48d73fd4/942b00ba-a615-452d-a0c1-633d48d73fd4.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 880.537662] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.537662] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63a042b0-c72d-402d-8319-aae9f3538d55 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.547264] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 880.547264] env[61972]: value = "task-1389296" [ 880.547264] env[61972]: _type = "Task" [ 880.547264] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.557766] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389296, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.601515] env[61972]: DEBUG nova.network.neutron [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Updating instance_info_cache with network_info: [{"id": "08285af7-59f1-4c6a-acd6-ef8aa2fd1506", "address": "fa:16:3e:3c:65:95", "network": {"id": "6758aea6-c6c7-410a-9a5a-0e9a99a4d739", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-978910950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b0ee0962334e68be3f7639d6fc559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08285af7-59", "ovs_interfaceid": "08285af7-59f1-4c6a-acd6-ef8aa2fd1506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.688360] env[61972]: DEBUG oslo_concurrency.lockutils [req-63a6d22a-f9aa-447e-a30a-26348212e15f req-cf9377c8-572e-4491-b67d-2991c09b84e2 service nova] Releasing lock "refresh_cache-9a0463a0-dc96-41b1-8415-22011644ac0d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.945127] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389295, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.999215] env[61972]: DEBUG oslo_vmware.api [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389294, 'name': RemoveSnapshot_Task, 'duration_secs': 0.735351} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.999371] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Deleted Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 880.999609] env[61972]: INFO nova.compute.manager [None req-64f60667-b858-43aa-baff-253fd4969acc tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Took 15.85 seconds to snapshot the instance on the hypervisor. 
[ 881.038274] env[61972]: DEBUG nova.scheduler.client.report [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 881.059686] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076123} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.059954] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.060838] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96e124f-7fc0-420a-9340-a00729bb8417 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.084197] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 942b00ba-a615-452d-a0c1-633d48d73fd4/942b00ba-a615-452d-a0c1-633d48d73fd4.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.084484] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33e65294-2aeb-4e86-8466-efb90e069c9f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.105271] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Releasing lock "refresh_cache-489f1de0-d1c8-4429-a6f1-24ea885282f3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.105563] env[61972]: DEBUG nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Instance network_info: |[{"id": "08285af7-59f1-4c6a-acd6-ef8aa2fd1506", "address": "fa:16:3e:3c:65:95", "network": {"id": "6758aea6-c6c7-410a-9a5a-0e9a99a4d739", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-978910950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b0ee0962334e68be3f7639d6fc559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08285af7-59", "ovs_interfaceid": "08285af7-59f1-4c6a-acd6-ef8aa2fd1506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 881.105918] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 881.105918] env[61972]: value = "task-1389297" [ 881.105918] env[61972]: _type = "Task" [ 881.105918] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.106257] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:65:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec1528b-3e87-477b-8ab2-02696ad47e66', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08285af7-59f1-4c6a-acd6-ef8aa2fd1506', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.113792] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Creating folder: Project (b3b0ee0962334e68be3f7639d6fc559d). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.114109] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-844184a3-52f4-4ab8-ae2c-a3e1a442156a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.124484] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389297, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.127816] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Created folder: Project (b3b0ee0962334e68be3f7639d6fc559d) in parent group-v294799. 
[ 881.128035] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Creating folder: Instances. Parent ref: group-v294866. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.128333] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87a7ea61-e71f-47f1-9746-18366286ef2f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.139392] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Created folder: Instances in parent group-v294866. [ 881.139643] env[61972]: DEBUG oslo.service.loopingcall [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.139841] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.140063] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-023f4581-2328-4535-b57d-0f3259d574cb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.158911] env[61972]: DEBUG nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 881.165333] env[61972]: DEBUG nova.compute.manager [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Received event network-changed-08285af7-59f1-4c6a-acd6-ef8aa2fd1506 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 881.165527] env[61972]: DEBUG nova.compute.manager [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Refreshing instance network info cache due to event network-changed-08285af7-59f1-4c6a-acd6-ef8aa2fd1506. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 881.165741] env[61972]: DEBUG oslo_concurrency.lockutils [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] Acquiring lock "refresh_cache-489f1de0-d1c8-4429-a6f1-24ea885282f3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.165885] env[61972]: DEBUG oslo_concurrency.lockutils [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] Acquired lock "refresh_cache-489f1de0-d1c8-4429-a6f1-24ea885282f3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.166059] env[61972]: DEBUG nova.network.neutron [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Refreshing network info cache for port 08285af7-59f1-4c6a-acd6-ef8aa2fd1506 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.171024] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.171024] env[61972]: value = "task-1389300" [ 881.171024] env[61972]: _type = "Task" [ 881.171024] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.190886] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389300, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.193267] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 881.193483] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 881.193642] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.193824] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Flavor 
pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 881.193971] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.194139] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 881.194345] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 881.194506] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 881.194670] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 881.194832] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 881.195017] env[61972]: DEBUG nova.virt.hardware [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.196117] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddd22cf-30b1-421a-8374-53166af963ba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.204562] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c44e5b0-1768-4cca-b047-12f71185d30d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.447797] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389295, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520884} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.447797] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 9a0463a0-dc96-41b1-8415-22011644ac0d/9a0463a0-dc96-41b1-8415-22011644ac0d.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.448269] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.448334] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9efe7514-24e7-4dc4-9acb-64100061bc41 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.456855] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 881.456855] env[61972]: value = "task-1389301" [ 881.456855] env[61972]: _type = "Task" [ 881.456855] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.466410] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389301, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.543864] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.543864] env[61972]: DEBUG nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 881.546429] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.772s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.546593] env[61972]: DEBUG nova.objects.instance [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61972) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 881.624367] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389297, 'name': ReconfigVM_Task, 'duration_secs': 0.392525} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.624668] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 942b00ba-a615-452d-a0c1-633d48d73fd4/942b00ba-a615-452d-a0c1-633d48d73fd4.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.625392] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-debdb663-bb44-4e61-990d-7f3338fcdaf3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.637258] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 881.637258] env[61972]: value = "task-1389302" [ 881.637258] env[61972]: _type = "Task" [ 881.637258] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.648194] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389302, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.680274] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389300, 'name': CreateVM_Task, 'duration_secs': 0.370563} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.680428] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 881.681131] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.681311] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.681631] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.681900] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0177203e-9999-476c-bdf1-5609d0227185 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.687580] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 881.687580] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]529dad43-4591-f299-9432-c2dca93c28f0" [ 881.687580] env[61972]: _type = "Task" [ 881.687580] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.696528] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529dad43-4591-f299-9432-c2dca93c28f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.967675] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389301, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067966} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.968086] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.969036] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4a31fe-f163-4f77-a247-259e9b3e3768 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.994920] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 9a0463a0-dc96-41b1-8415-22011644ac0d/9a0463a0-dc96-41b1-8415-22011644ac0d.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.997581] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d096483-22cd-44f1-a5f4-8c1b6853ec15 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.019400] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 882.019400] env[61972]: value = "task-1389303" [ 882.019400] env[61972]: _type = "Task" [ 882.019400] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.028261] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389303, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.052463] env[61972]: DEBUG nova.compute.utils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 882.059629] env[61972]: DEBUG nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 882.059822] env[61972]: DEBUG nova.network.neutron [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 882.118549] env[61972]: DEBUG nova.policy [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6b7c5b037a54c8cbd151ad0f1875f37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbbaa322b60942819cfb147b5201daf4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 882.149086] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389302, 'name': Rename_Task, 'duration_secs': 0.149367} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.149448] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.149799] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1310cc78-1dd6-443f-ab3d-e4c2d23f6b7f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.158660] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 882.158660] env[61972]: value = "task-1389304" [ 882.158660] env[61972]: _type = "Task" [ 882.158660] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.169803] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389304, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.199649] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529dad43-4591-f299-9432-c2dca93c28f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010245} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.200162] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.201021] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.201021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.201021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.201270] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.201622] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cffb6812-9e74-4dbe-b441-948a55d82566 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.215847] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.216210] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.216975] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d3cac61-724d-47a2-af26-58ae6044df55 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.223753] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 882.223753] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a9e47a-316c-1575-447e-9b7fc216f51c" [ 882.223753] env[61972]: _type = "Task" [ 882.223753] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.233976] env[61972]: DEBUG nova.compute.manager [req-5d816a03-d0d7-400f-9855-98cbf1224bbb req-08c5dc10-7fc8-45ab-b469-e9612e5931e8 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received event network-vif-plugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 882.234364] env[61972]: DEBUG oslo_concurrency.lockutils [req-5d816a03-d0d7-400f-9855-98cbf1224bbb req-08c5dc10-7fc8-45ab-b469-e9612e5931e8 service nova] Acquiring lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.234483] env[61972]: DEBUG oslo_concurrency.lockutils [req-5d816a03-d0d7-400f-9855-98cbf1224bbb req-08c5dc10-7fc8-45ab-b469-e9612e5931e8 service nova] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.234689] env[61972]: DEBUG oslo_concurrency.lockutils [req-5d816a03-d0d7-400f-9855-98cbf1224bbb req-08c5dc10-7fc8-45ab-b469-e9612e5931e8 service nova] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.234820] env[61972]: DEBUG nova.compute.manager [req-5d816a03-d0d7-400f-9855-98cbf1224bbb req-08c5dc10-7fc8-45ab-b469-e9612e5931e8 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] No waiting events found dispatching network-vif-plugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 882.234990] env[61972]: WARNING nova.compute.manager [req-5d816a03-d0d7-400f-9855-98cbf1224bbb req-08c5dc10-7fc8-45ab-b469-e9612e5931e8 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received unexpected event network-vif-plugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b for instance with vm_state building and task_state spawning. [ 882.239728] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a9e47a-316c-1575-447e-9b7fc216f51c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.263659] env[61972]: DEBUG nova.network.neutron [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Updated VIF entry in instance network info cache for port 08285af7-59f1-4c6a-acd6-ef8aa2fd1506. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 882.264359] env[61972]: DEBUG nova.network.neutron [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Updating instance_info_cache with network_info: [{"id": "08285af7-59f1-4c6a-acd6-ef8aa2fd1506", "address": "fa:16:3e:3c:65:95", "network": {"id": "6758aea6-c6c7-410a-9a5a-0e9a99a4d739", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-978910950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b0ee0962334e68be3f7639d6fc559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08285af7-59", "ovs_interfaceid": "08285af7-59f1-4c6a-acd6-ef8aa2fd1506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.411396] env[61972]: DEBUG nova.network.neutron [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Successfully created port: 635a0344-5f1c-4ac7-be41-f83183d9145d {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.532393] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389303, 'name': ReconfigVM_Task, 'duration_secs': 0.325969} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.532787] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 9a0463a0-dc96-41b1-8415-22011644ac0d/9a0463a0-dc96-41b1-8415-22011644ac0d.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.533404] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07702f44-3c73-4e39-b4c7-2141d2693bad {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.541343] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 882.541343] env[61972]: value = "task-1389305" [ 882.541343] env[61972]: _type = "Task" [ 882.541343] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.551870] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389305, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.555599] env[61972]: DEBUG nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 882.561466] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f1156ee-f0c9-46cc-862e-5b851d64f9d1 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.562627] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.400s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.564030] env[61972]: INFO nova.compute.claims [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.647517] env[61972]: DEBUG nova.network.neutron [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Successfully updated port: 89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.672216] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389304, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.740753] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a9e47a-316c-1575-447e-9b7fc216f51c, 'name': SearchDatastore_Task, 'duration_secs': 0.013257} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.742240] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baa057d0-e62e-4eba-9273-452471c8797c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.750225] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 882.750225] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc8db8-8186-c53c-fed0-eb18d6acdc48" [ 882.750225] env[61972]: _type = "Task" [ 882.750225] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.761689] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc8db8-8186-c53c-fed0-eb18d6acdc48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.767224] env[61972]: DEBUG oslo_concurrency.lockutils [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] Releasing lock "refresh_cache-489f1de0-d1c8-4429-a6f1-24ea885282f3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.767517] env[61972]: DEBUG nova.compute.manager [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Received event network-changed-6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 882.767712] env[61972]: DEBUG nova.compute.manager [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Refreshing instance network info cache due to event network-changed-6348fdb6-1e04-4d45-b3d2-e67eb05449f7. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 882.768360] env[61972]: DEBUG oslo_concurrency.lockutils [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] Acquiring lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.768360] env[61972]: DEBUG oslo_concurrency.lockutils [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] Acquired lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.768360] env[61972]: DEBUG nova.network.neutron [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Refreshing network info cache for port 6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.056030] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389305, 'name': Rename_Task, 'duration_secs': 0.282411} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.056030] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.056236] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4caaf0b0-9ff2-4f74-a90e-58c3f354ae63 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.064873] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 883.064873] env[61972]: value = "task-1389306" [ 883.064873] env[61972]: _type = "Task" [ 883.064873] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.077846] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389306, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.151680] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.151908] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.151978] env[61972]: DEBUG nova.network.neutron [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.188693] env[61972]: DEBUG nova.compute.manager [req-75c62c2a-3793-409e-993d-411484d162f6 req-aedd3837-d73e-48ea-a7cb-e470981e7409 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received event network-changed-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 883.189047] env[61972]: DEBUG nova.compute.manager [req-75c62c2a-3793-409e-993d-411484d162f6 req-aedd3837-d73e-48ea-a7cb-e470981e7409 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Refreshing instance network info cache due to event network-changed-89e228e1-2aac-4e05-98ee-5c29dd44f55b. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 883.189384] env[61972]: DEBUG oslo_concurrency.lockutils [req-75c62c2a-3793-409e-993d-411484d162f6 req-aedd3837-d73e-48ea-a7cb-e470981e7409 service nova] Acquiring lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.195431] env[61972]: DEBUG oslo_vmware.api [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389304, 'name': PowerOnVM_Task, 'duration_secs': 0.71397} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.195801] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.196132] env[61972]: DEBUG nova.compute.manager [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 883.197423] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314c72a3-803a-4ab9-b613-fe4042551d67 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.264573] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc8db8-8186-c53c-fed0-eb18d6acdc48, 'name': SearchDatastore_Task, 'duration_secs': 0.032169} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.264963] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.265364] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 489f1de0-d1c8-4429-a6f1-24ea885282f3/489f1de0-d1c8-4429-a6f1-24ea885282f3.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.265726] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c80da091-0a26-4db5-a554-cbe02a227d87 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.278387] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 883.278387] env[61972]: value = "task-1389307" [ 883.278387] env[61972]: _type = "Task" [ 883.278387] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.290263] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389307, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.565500] env[61972]: DEBUG nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 883.568278] env[61972]: DEBUG nova.network.neutron [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Updated VIF entry in instance network info cache for port 6348fdb6-1e04-4d45-b3d2-e67eb05449f7. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 883.568658] env[61972]: DEBUG nova.network.neutron [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Updating instance_info_cache with network_info: [{"id": "6348fdb6-1e04-4d45-b3d2-e67eb05449f7", "address": "fa:16:3e:ef:39:8c", "network": {"id": "d8afcb85-a62b-495d-aaca-f790f02686f9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-98749794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dddfa1d6702d444faf82e9e456f124f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6348fdb6-1e", "ovs_interfaceid": "6348fdb6-1e04-4d45-b3d2-e67eb05449f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.583462] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389306, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.602910] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d565607bf1e7e7d08ab1c5e21740f50f',container_format='bare',created_at=2024-10-31T12:15:18Z,direct_url=,disk_format='vmdk',id=484207c6-3534-46e0-becb-1716957998ac,min_disk=1,min_ram=0,name='tempest-test-snap-659323194',owner='dbbaa322b60942819cfb147b5201daf4',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-31T12:15:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.603254] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.603429] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.603621] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.603778] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.603986] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.604228] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.604392] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.604564] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] 
Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.604732] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.604925] env[61972]: DEBUG nova.virt.hardware [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.606462] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325fa80c-dae2-4120-b0f0-f64c6582e735 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.618999] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a95828c-25d4-4414-9295-16c2321562a6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.687391] env[61972]: DEBUG nova.network.neutron [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.721381] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.738703] env[61972]: DEBUG nova.compute.manager [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 883.739771] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65056dbe-1fe3-43e7-a201-cb65126327fb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.794065] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389307, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.881123] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29330c01-1f57-40da-adbb-5fd0581f030d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.897095] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18195436-04ff-451e-8d6b-f8f6e139646c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.936176] env[61972]: DEBUG nova.network.neutron [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [{"id": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "address": "fa:16:3e:68:43:9d", "network": {"id": "8bff1a25-9939-4436-a9bb-c54446b85c9e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-555487388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9266fa0d01664ba4a80ff4068cb9b9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e228e1-2a", "ovs_interfaceid": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.940682] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4d8762-3fe0-45cb-893d-3c4fd3aba85d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.949382] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db5e41a-7ed7-4bf7-a155-2f3c3a823615 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.969932] env[61972]: DEBUG nova.compute.provider_tree [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.077080] env[61972]: DEBUG 
oslo_concurrency.lockutils [req-9a99ac48-2a61-41ea-96f1-072d689c5f51 req-0eccba5d-129b-4b1a-a20a-74351e7eea8b service nova] Releasing lock "refresh_cache-b9726bf4-a4b1-4b22-840f-98157d0d790c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.084582] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389306, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.148155] env[61972]: DEBUG nova.network.neutron [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Successfully updated port: 635a0344-5f1c-4ac7-be41-f83183d9145d {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 884.257051] env[61972]: INFO nova.compute.manager [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] instance snapshotting [ 884.259646] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a11f8a5-fdb1-432e-8221-6462258a5fe5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.279841] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f810563-6139-4c33-a842-ba53dd3035e1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.300030] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389307, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.788634} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.300030] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 489f1de0-d1c8-4429-a6f1-24ea885282f3/489f1de0-d1c8-4429-a6f1-24ea885282f3.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.300030] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.300030] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e814644-c694-4fb1-a7a0-a68a9a76aaa2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.308814] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 884.308814] env[61972]: value = "task-1389308" [ 884.308814] env[61972]: _type = "Task" [ 884.308814] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.320083] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389308, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.442930] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.443312] env[61972]: DEBUG nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Instance network_info: |[{"id": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "address": "fa:16:3e:68:43:9d", "network": {"id": "8bff1a25-9939-4436-a9bb-c54446b85c9e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-555487388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9266fa0d01664ba4a80ff4068cb9b9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e228e1-2a", "ovs_interfaceid": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 884.443725] env[61972]: DEBUG oslo_concurrency.lockutils [req-75c62c2a-3793-409e-993d-411484d162f6 req-aedd3837-d73e-48ea-a7cb-e470981e7409 service nova] Acquired lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.443964] env[61972]: DEBUG nova.network.neutron [req-75c62c2a-3793-409e-993d-411484d162f6 req-aedd3837-d73e-48ea-a7cb-e470981e7409 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Refreshing network info cache for port 89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.445188] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:43:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89e228e1-2aac-4e05-98ee-5c29dd44f55b', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.454660] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 
tempest-AttachVolumeShelveTestJSON-12225270-project-member] Creating folder: Project (9266fa0d01664ba4a80ff4068cb9b9bc). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.459289] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2aba4d1-5f2d-4846-9c7d-b084d41d6aae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.474009] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Created folder: Project (9266fa0d01664ba4a80ff4068cb9b9bc) in parent group-v294799. [ 884.474338] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Creating folder: Instances. Parent ref: group-v294869. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.477877] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-406e8af2-3483-4ce3-b5f1-caea188341f3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.490565] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Created folder: Instances in parent group-v294869. [ 884.490935] env[61972]: DEBUG oslo.service.loopingcall [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.491124] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.491350] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93e2d61b-ceaa-436c-b15f-5ea1d310616d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.508121] env[61972]: ERROR nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [req-d84334d6-2037-4e53-91ce-f0fa32ac306c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f34b92c-91e8-4983-ae34-7426fcec3157. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d84334d6-2037-4e53-91ce-f0fa32ac306c"}]} [ 884.521020] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.521020] env[61972]: value = "task-1389311" [ 884.521020] env[61972]: _type = "Task" [ 884.521020] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.529676] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389311, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.530875] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing inventories for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 884.548394] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating ProviderTree inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 884.548628] env[61972]: DEBUG nova.compute.provider_tree [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.564932] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing aggregate associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, aggregates: None {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 884.588245] env[61972]: DEBUG oslo_vmware.api [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389306, 'name': PowerOnVM_Task, 'duration_secs': 1.024621} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.588733] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.588886] env[61972]: INFO nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Took 8.20 seconds to spawn the instance on the hypervisor. [ 884.589088] env[61972]: DEBUG nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 884.590190] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing trait associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 884.593482] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7294452-2d6c-4dba-ac8e-6274aa83e9f7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.650949] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "refresh_cache-a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.651178] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "refresh_cache-a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.651271] env[61972]: DEBUG nova.network.neutron [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 884.744278] env[61972]: DEBUG nova.network.neutron [req-75c62c2a-3793-409e-993d-411484d162f6 req-aedd3837-d73e-48ea-a7cb-e470981e7409 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updated VIF entry in instance network info cache for port 89e228e1-2aac-4e05-98ee-5c29dd44f55b. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.744619] env[61972]: DEBUG nova.network.neutron [req-75c62c2a-3793-409e-993d-411484d162f6 req-aedd3837-d73e-48ea-a7cb-e470981e7409 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [{"id": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "address": "fa:16:3e:68:43:9d", "network": {"id": "8bff1a25-9939-4436-a9bb-c54446b85c9e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-555487388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9266fa0d01664ba4a80ff4068cb9b9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e228e1-2a", "ovs_interfaceid": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.799094] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "942b00ba-a615-452d-a0c1-633d48d73fd4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.799450] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "942b00ba-a615-452d-a0c1-633d48d73fd4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.799713] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "942b00ba-a615-452d-a0c1-633d48d73fd4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.799940] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "942b00ba-a615-452d-a0c1-633d48d73fd4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.800174] env[61972]: DEBUG oslo_concurrency.lockutils [None 
req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "942b00ba-a615-452d-a0c1-633d48d73fd4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.803049] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Creating Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 884.803606] env[61972]: INFO nova.compute.manager [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Terminating instance [ 884.805108] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-78944adb-1253-4062-ba63-8e148c2579a9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.818307] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 884.818307] env[61972]: value = "task-1389312" [ 884.818307] env[61972]: _type = "Task" [ 884.818307] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.827595] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389308, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094521} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.830590] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.830590] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a48bb8-a228-43d1-9d05-5c7cd8a17cd7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.837973] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389312, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.862447] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 489f1de0-d1c8-4429-a6f1-24ea885282f3/489f1de0-d1c8-4429-a6f1-24ea885282f3.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.865876] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb3a95e2-6aa6-4855-b593-d35e2c2d2a58 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.920072] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5b7f22-5815-4bc1-9f73-1bc68cf42eff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.923749] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 884.923749] env[61972]: value = "task-1389313" [ 884.923749] env[61972]: _type = "Task" [ 884.923749] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.932497] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b3c354-a4ee-41fd-9c8f-7a300ee99658 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.944325] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389313, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.981385] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5800b4c-6292-4788-a05c-6c83c3d05eb6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.996912] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4483b20b-3317-451f-a382-a8c18fb106cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.012713] env[61972]: DEBUG nova.compute.provider_tree [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 885.029708] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389311, 'name': CreateVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.117683] env[61972]: INFO nova.compute.manager [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Took 28.78 seconds to build instance. [ 885.191952] env[61972]: DEBUG nova.network.neutron [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.222595] env[61972]: DEBUG nova.compute.manager [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Received event network-vif-plugged-635a0344-5f1c-4ac7-be41-f83183d9145d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 885.222917] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] Acquiring lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.223233] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] Lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.223493] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] Lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.223748] env[61972]: DEBUG nova.compute.manager [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] No waiting events found dispatching network-vif-plugged-635a0344-5f1c-4ac7-be41-f83183d9145d {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 885.224009] env[61972]: WARNING nova.compute.manager [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Received unexpected event network-vif-plugged-635a0344-5f1c-4ac7-be41-f83183d9145d for instance with vm_state building and task_state spawning. [ 885.224273] env[61972]: DEBUG nova.compute.manager [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Received event network-changed-635a0344-5f1c-4ac7-be41-f83183d9145d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 885.224513] env[61972]: DEBUG nova.compute.manager [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Refreshing instance network info cache due to event network-changed-635a0344-5f1c-4ac7-be41-f83183d9145d. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 885.224763] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] Acquiring lock "refresh_cache-a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.247254] env[61972]: DEBUG oslo_concurrency.lockutils [req-75c62c2a-3793-409e-993d-411484d162f6 req-aedd3837-d73e-48ea-a7cb-e470981e7409 service nova] Releasing lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.309746] env[61972]: DEBUG nova.compute.manager [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 885.310269] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 885.311872] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937f18de-6867-4f9c-a83b-0b3587ad0294 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.330370] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389312, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.333238] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.333689] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6d834fb-9aa6-46f8-a045-a579542122a6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.346505] env[61972]: DEBUG oslo_vmware.api [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 885.346505] env[61972]: value = "task-1389314" [ 885.346505] env[61972]: _type = "Task" [ 885.346505] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.357336] env[61972]: DEBUG oslo_vmware.api [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389314, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.366408] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "84e07f61-2111-43cb-93a2-9cb47ac52503" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.366805] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.383057] env[61972]: DEBUG nova.network.neutron [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Updating instance_info_cache with network_info: [{"id": "635a0344-5f1c-4ac7-be41-f83183d9145d", "address": "fa:16:3e:24:3c:95", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap635a0344-5f", "ovs_interfaceid": "635a0344-5f1c-4ac7-be41-f83183d9145d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.436591] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389313, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.530958] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389311, 'name': CreateVM_Task, 'duration_secs': 0.652669} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.531179] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.531867] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.532051] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.532400] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 885.532658] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eade3fd0-5ecb-46df-8e19-8f7b66c22167 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.538848] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 885.538848] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f7f24a-96ef-3365-8beb-89a0c3f5010b" [ 885.538848] env[61972]: _type = "Task" [ 885.538848] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.543165] env[61972]: ERROR nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [req-fccaeb20-93ee-429b-8b00-bb71dead4a35] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f34b92c-91e8-4983-ae34-7426fcec3157. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fccaeb20-93ee-429b-8b00-bb71dead4a35"}]} [ 885.551690] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f7f24a-96ef-3365-8beb-89a0c3f5010b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.560210] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing inventories for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 885.576349] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating ProviderTree inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 885.576573] env[61972]: DEBUG nova.compute.provider_tree [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 885.588705] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing aggregate associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, aggregates: None {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 885.609140] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing trait associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61972) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:840}} [ 885.620385] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a7eeec23-6275-4493-8ef8-692ab9ee5791 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "9a0463a0-dc96-41b1-8415-22011644ac0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.428s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.833646] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389312, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.853739] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533711be-234f-4f5c-81a1-3669e69498ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.862514] env[61972]: DEBUG oslo_vmware.api [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389314, 'name': PowerOffVM_Task, 'duration_secs': 0.198832} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.864544] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.864764] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.865048] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06fe56ba-f162-4942-b72d-9000b8fd88a4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.867284] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aee7790-db4b-4b57-8e3a-de4ecf305363 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.870460] env[61972]: DEBUG nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 885.903236] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "refresh_cache-a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.903465] env[61972]: DEBUG nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Instance network_info: |[{"id": "635a0344-5f1c-4ac7-be41-f83183d9145d", "address": "fa:16:3e:24:3c:95", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap635a0344-5f", "ovs_interfaceid": "635a0344-5f1c-4ac7-be41-f83183d9145d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 885.904737] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] Acquired lock "refresh_cache-a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.904923] env[61972]: DEBUG nova.network.neutron [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Refreshing network info cache for port 635a0344-5f1c-4ac7-be41-f83183d9145d {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.906227] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:3c:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '838c9497-35dd-415e-96c7-8dc21b0cd4b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '635a0344-5f1c-4ac7-be41-f83183d9145d', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 885.913611] env[61972]: DEBUG oslo.service.loopingcall [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.914383] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26ce5ec-7a87-458a-97ec-142a6605265d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.920458] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 885.920998] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2016593c-f66d-4c11-aaeb-69f87906f95f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.946088] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4792480-2873-4a5a-b665-bb62b0b3183f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.954481] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.955539] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.955539] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleting the datastore file [datastore2] 942b00ba-a615-452d-a0c1-633d48d73fd4 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.955658] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389313, 'name': ReconfigVM_Task, 'duration_secs': 0.575611} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.958027] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f03d70da-7107-4790-b4ae-6882b37001d4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.959588] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 489f1de0-d1c8-4429-a6f1-24ea885282f3/489f1de0-d1c8-4429-a6f1-24ea885282f3.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.960481] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 885.960481] env[61972]: value = "task-1389316" [ 885.960481] env[61972]: _type = "Task" [ 885.960481] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.974205] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb931fa9-fe70-4ec9-b853-417c8f3b0fcf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.975817] env[61972]: DEBUG nova.compute.provider_tree [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 885.982970] env[61972]: DEBUG oslo_vmware.api [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 885.982970] env[61972]: value = "task-1389317" [ 885.982970] env[61972]: _type = "Task" [ 885.982970] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.989889] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389316, 'name': CreateVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.992768] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 885.992768] env[61972]: value = "task-1389318" [ 885.992768] env[61972]: _type = "Task" [ 885.992768] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.001626] env[61972]: DEBUG oslo_vmware.api [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389317, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.013695] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389318, 'name': Rename_Task} progress is 10%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.051143] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f7f24a-96ef-3365-8beb-89a0c3f5010b, 'name': SearchDatastore_Task, 'duration_secs': 0.01272} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.054049] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.054355] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.054603] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.054826] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.054961] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.055309] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c432a61-9a51-42e7-8a24-5e891697160c {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.080992] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.081332] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.082472] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26c11e94-ecc7-4524-a099-390c866f45c3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.092286] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 886.092286] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]528d7833-4961-99e0-7875-963627ce0c31" [ 886.092286] env[61972]: _type = "Task" [ 886.092286] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.106372] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]528d7833-4961-99e0-7875-963627ce0c31, 'name': SearchDatastore_Task, 'duration_secs': 0.010232} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.107283] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c458589-0384-4b8f-821c-0b8958b8ec63 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.118134] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 886.118134] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5268ae73-15f7-34c0-7f65-3462d7d6cb0e" [ 886.118134] env[61972]: _type = "Task" [ 886.118134] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.132463] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5268ae73-15f7-34c0-7f65-3462d7d6cb0e, 'name': SearchDatastore_Task, 'duration_secs': 0.010356} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.132895] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.133198] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 56e21cf4-4dbc-4f72-97c0-082dd689c046/56e21cf4-4dbc-4f72-97c0-082dd689c046.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.133503] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-255f2b76-8bc8-4be9-9ca7-ba9abb324c92 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.143205] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 886.143205] env[61972]: value = "task-1389319" [ 886.143205] env[61972]: _type = "Task" [ 886.143205] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.161220] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.259312] env[61972]: DEBUG nova.network.neutron [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Updated VIF entry in instance network info cache for port 635a0344-5f1c-4ac7-be41-f83183d9145d. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.259804] env[61972]: DEBUG nova.network.neutron [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Updating instance_info_cache with network_info: [{"id": "635a0344-5f1c-4ac7-be41-f83183d9145d", "address": "fa:16:3e:24:3c:95", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap635a0344-5f", "ovs_interfaceid": "635a0344-5f1c-4ac7-be41-f83183d9145d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.261646] env[61972]: DEBUG oslo_concurrency.lockutils [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquiring lock "9a0463a0-dc96-41b1-8415-22011644ac0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.261989] env[61972]: DEBUG oslo_concurrency.lockutils [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "9a0463a0-dc96-41b1-8415-22011644ac0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.262356] env[61972]: DEBUG oslo_concurrency.lockutils [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquiring lock "9a0463a0-dc96-41b1-8415-22011644ac0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.262699] env[61972]: DEBUG oslo_concurrency.lockutils [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "9a0463a0-dc96-41b1-8415-22011644ac0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.263306] env[61972]: DEBUG oslo_concurrency.lockutils [None 
req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "9a0463a0-dc96-41b1-8415-22011644ac0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.266234] env[61972]: INFO nova.compute.manager [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Terminating instance [ 886.332499] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389312, 'name': CreateSnapshot_Task, 'duration_secs': 1.392707} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.332862] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Created Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 886.333685] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87afccb-87f3-4e01-9d4d-d92087475a6a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.392988] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.490520] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389316, 'name': CreateVM_Task, 'duration_secs': 0.442446} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.495538] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 886.496828] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.497143] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.497676] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 886.502456] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0755527-14bd-4048-9f32-2431b60c1b27 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.509586] env[61972]: DEBUG oslo_vmware.api [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161674} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.514335] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.514601] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 886.514816] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.515092] env[61972]: INFO nova.compute.manager [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 886.515403] env[61972]: DEBUG oslo.service.loopingcall [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.515679] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 886.515679] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5237df65-b085-870f-556a-22b107ca59e2" [ 886.515679] env[61972]: _type = "Task" [ 886.515679] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.516392] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389318, 'name': Rename_Task, 'duration_secs': 0.205555} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.516637] env[61972]: DEBUG nova.compute.manager [-] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 886.516737] env[61972]: DEBUG nova.network.neutron [-] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.518958] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.523291] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-062f0952-21ee-4df8-9b48-4f1eb77933c3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.531486] env[61972]: DEBUG nova.scheduler.client.report [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updated inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with generation 91 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 886.531486] env[61972]: DEBUG nova.compute.provider_tree [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 generation from 91 to 92 during operation: update_inventory {{(pid=61972) 
_update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 886.531486] env[61972]: DEBUG nova.compute.provider_tree [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 886.538490] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 886.538490] env[61972]: value = "task-1389320" [ 886.538490] env[61972]: _type = "Task" [ 886.538490] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.539159] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.539386] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Processing image 484207c6-3534-46e0-becb-1716957998ac {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.539634] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac/484207c6-3534-46e0-becb-1716957998ac.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.539789] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac/484207c6-3534-46e0-becb-1716957998ac.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.539988] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.540335] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-613335a3-ef04-495c-9e68-0cd887e1bd69 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.556293] env[61972]: DEBUG oslo_vmware.api [None 
req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389320, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.571419] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.571715] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.573279] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed75bf5c-bb91-4eee-8904-0803d5d86690 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.581993] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 886.581993] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]526223f3-1448-3314-a025-7cafeba59bfc" [ 886.581993] env[61972]: _type = "Task" [ 886.581993] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.591568] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]526223f3-1448-3314-a025-7cafeba59bfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.655913] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389319, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.767491] env[61972]: DEBUG oslo_concurrency.lockutils [req-d8539668-563f-411b-a093-cea92aa173f9 req-f1d56ea5-5c7e-4866-88bc-22aa0d22ca2d service nova] Releasing lock "refresh_cache-a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.770550] env[61972]: DEBUG nova.compute.manager [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 886.770846] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 886.771773] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb23f8f-ba91-4e59-8d0d-ebe1044227c5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.780616] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.780886] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44d9a902-f15c-4d45-8107-4931ae7f525c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.787700] env[61972]: DEBUG oslo_vmware.api [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 886.787700] env[61972]: value = "task-1389321" [ 886.787700] env[61972]: _type = "Task" [ 886.787700] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.798190] env[61972]: DEBUG oslo_vmware.api [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389321, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.854332] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Creating linked-clone VM from snapshot {{(pid=61972) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 886.854975] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-72ec839f-f9c3-4bf9-a84f-4530c6da4a98 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.864684] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 886.864684] env[61972]: value = "task-1389322" [ 886.864684] env[61972]: _type = "Task" [ 886.864684] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.874324] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389322, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.040511] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.478s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.041164] env[61972]: DEBUG nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 887.043987] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.430s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.044334] env[61972]: DEBUG nova.objects.instance [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'resources' on Instance uuid 3d424523-b45d-4174-ac7a-08fd653e314f {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.060648] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389320, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.093975] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Preparing fetch location {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 887.094327] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Fetch image to [datastore2] OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117/OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117.vmdk {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 887.094475] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Downloading stream optimized image 484207c6-3534-46e0-becb-1716957998ac to [datastore2] OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117/OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117.vmdk on the data store datastore2 as vApp {{(pid=61972) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 887.094654] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Downloading image file data 484207c6-3534-46e0-becb-1716957998ac to the ESX as VM named 'OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117' {{(pid=61972) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 887.157021] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607766} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.157424] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 56e21cf4-4dbc-4f72-97c0-082dd689c046/56e21cf4-4dbc-4f72-97c0-082dd689c046.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.157725] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.158112] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46faaf12-8086-4035-b193-55cbf37aa25c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.168855] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 887.168855] env[61972]: value = "task-1389323" [ 887.168855] env[61972]: _type = "Task" [ 887.168855] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.183882] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389323, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.189382] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 887.189382] env[61972]: value = "resgroup-9" [ 887.189382] env[61972]: _type = "ResourcePool" [ 887.189382] env[61972]: }. {{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 887.189797] env[61972]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-31391317-f187-446d-843f-c0dfd10cd607 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.221267] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lease: (returnval){ [ 887.221267] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cecceb-b28b-e048-6ead-01f885ffc929" [ 887.221267] env[61972]: _type = "HttpNfcLease" [ 887.221267] env[61972]: } obtained for vApp import into resource pool (val){ [ 887.221267] env[61972]: value = "resgroup-9" [ 887.221267] env[61972]: _type = "ResourcePool" [ 887.221267] env[61972]: }. 
{{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 887.221766] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the lease: (returnval){ [ 887.221766] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cecceb-b28b-e048-6ead-01f885ffc929" [ 887.221766] env[61972]: _type = "HttpNfcLease" [ 887.221766] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 887.231046] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 887.231046] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cecceb-b28b-e048-6ead-01f885ffc929" [ 887.231046] env[61972]: _type = "HttpNfcLease" [ 887.231046] env[61972]: } is initializing. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 887.248619] env[61972]: DEBUG nova.compute.manager [req-0fd25023-8ca4-4e2b-b20a-4b42e4b5df55 req-8cf893d6-e6a3-4248-af0f-6e60501289ef service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Received event network-vif-deleted-db1b2713-6097-47ef-bec1-5ef54204a3da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 887.248830] env[61972]: INFO nova.compute.manager [req-0fd25023-8ca4-4e2b-b20a-4b42e4b5df55 req-8cf893d6-e6a3-4248-af0f-6e60501289ef service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Neutron deleted interface db1b2713-6097-47ef-bec1-5ef54204a3da; detaching it from the instance and deleting it from the info cache [ 887.249016] env[61972]: DEBUG nova.network.neutron [req-0fd25023-8ca4-4e2b-b20a-4b42e4b5df55 req-8cf893d6-e6a3-4248-af0f-6e60501289ef service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.298235] env[61972]: DEBUG oslo_vmware.api [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389321, 'name': PowerOffVM_Task, 'duration_secs': 0.248705} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.298517] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 887.298685] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 887.298982] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fd8d241-b627-4757-a538-d97a9bdf7b77 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.326730] env[61972]: DEBUG nova.network.neutron [-] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.380434] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389322, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.388424] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 887.388787] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 887.389129] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Deleting the datastore file [datastore2] 9a0463a0-dc96-41b1-8415-22011644ac0d {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 887.389523] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e701271b-828f-4a12-ae76-9d7b419067c0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.397657] env[61972]: DEBUG oslo_vmware.api [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for the task: (returnval){ [ 887.397657] env[61972]: value = "task-1389326" [ 887.397657] env[61972]: _type = "Task" [ 887.397657] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.407479] env[61972]: DEBUG oslo_vmware.api [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389326, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.550381] env[61972]: DEBUG nova.compute.utils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 887.556243] env[61972]: DEBUG nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 887.556439] env[61972]: DEBUG nova.network.neutron [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 887.566904] env[61972]: DEBUG oslo_vmware.api [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389320, 'name': PowerOnVM_Task, 'duration_secs': 0.630063} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.567228] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.567427] env[61972]: INFO nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Took 8.76 seconds to spawn the instance on the hypervisor. 
[ 887.567623] env[61972]: DEBUG nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 887.568482] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cef7d6c-c3c2-4fb3-b8f9-0d7b66062215 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.609921] env[61972]: DEBUG nova.policy [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ed5336d22ef451e842b188bdd50f353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fd99c56733940dda5267401c71b9e5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 887.679779] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077167} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.680127] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.680887] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6a2760-4a72-4ec2-a583-21880a97334b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.704702] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 56e21cf4-4dbc-4f72-97c0-082dd689c046/56e21cf4-4dbc-4f72-97c0-082dd689c046.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.707635] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d762e7b-44c0-4cfc-bcd9-213b601179b1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.730765] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 887.730765] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cecceb-b28b-e048-6ead-01f885ffc929" [ 887.730765] env[61972]: _type = "HttpNfcLease" [ 887.730765] env[61972]: } is initializing. 
{{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 887.732096] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 887.732096] env[61972]: value = "task-1389327" [ 887.732096] env[61972]: _type = "Task" [ 887.732096] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.743809] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389327, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.751073] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fc242ae-5481-4e09-837d-f2be18c3df94 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.760494] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ac3b0b-d6f0-4d72-8e54-03b2c6591ff0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.794149] env[61972]: DEBUG nova.compute.manager [req-0fd25023-8ca4-4e2b-b20a-4b42e4b5df55 req-8cf893d6-e6a3-4248-af0f-6e60501289ef service nova] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Detach interface failed, port_id=db1b2713-6097-47ef-bec1-5ef54204a3da, reason: Instance 942b00ba-a615-452d-a0c1-633d48d73fd4 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 887.830736] env[61972]: INFO nova.compute.manager [-] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Took 1.31 seconds to deallocate network for instance. [ 887.872889] env[61972]: DEBUG nova.network.neutron [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Successfully created port: 8b14120b-00c5-492b-9827-1a2726e53641 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.878924] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389322, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.881310] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dea18c-f637-4c15-93f6-ce84bb0a382a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.888626] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23f1939-37b2-4a7f-8e93-6b22b58bce9d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.924158] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb43f2a6-dc1a-4faa-9983-656973335df1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.931897] env[61972]: DEBUG oslo_vmware.api [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Task: {'id': task-1389326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181847} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.934082] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 887.934354] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 887.934574] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 887.934790] env[61972]: INFO nova.compute.manager [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 887.935091] env[61972]: DEBUG oslo.service.loopingcall [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 887.935396] env[61972]: DEBUG nova.compute.manager [-] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 887.935521] env[61972]: DEBUG nova.network.neutron [-] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 887.938138] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234089df-7d28-4864-b75a-658f4e9285dd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.953480] env[61972]: DEBUG nova.compute.provider_tree [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.059259] env[61972]: DEBUG nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 888.087377] env[61972]: INFO nova.compute.manager [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Took 25.76 seconds to build instance. [ 888.231700] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 888.231700] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cecceb-b28b-e048-6ead-01f885ffc929" [ 888.231700] env[61972]: _type = "HttpNfcLease" [ 888.231700] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 888.232012] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 888.232012] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cecceb-b28b-e048-6ead-01f885ffc929" [ 888.232012] env[61972]: _type = "HttpNfcLease" [ 888.232012] env[61972]: }. {{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 888.232772] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415acb17-a455-424d-9d47-cfc56b00390d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.244380] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5200c69b-e4e3-f452-ebdf-effd4699e86e/disk-0.vmdk from lease info. 
{{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 888.244576] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5200c69b-e4e3-f452-ebdf-effd4699e86e/disk-0.vmdk. {{(pid=61972) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 888.249845] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389327, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.331749] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-523ed5d3-fb52-4542-ab58-fa288760683b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.339393] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.377874] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389322, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.457792] env[61972]: DEBUG nova.scheduler.client.report [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 888.591023] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2830845-fb50-4c54-a26d-8eacf18d0571 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "489f1de0-d1c8-4429-a6f1-24ea885282f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.259s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.749324] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389327, 'name': ReconfigVM_Task, 'duration_secs': 0.745849} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.750738] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 56e21cf4-4dbc-4f72-97c0-082dd689c046/56e21cf4-4dbc-4f72-97c0-082dd689c046.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.751452] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25a99f31-39ae-460f-85aa-1c90e2a7a23a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.758246] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 888.758246] env[61972]: value = "task-1389328" [ 888.758246] env[61972]: _type = "Task" [ 888.758246] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.768260] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389328, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.825098] env[61972]: DEBUG nova.network.neutron [-] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.879214] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389322, 'name': CloneVM_Task, 'duration_secs': 1.80644} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.879312] env[61972]: INFO nova.virt.vmwareapi.vmops [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Created linked-clone VM from snapshot [ 888.880224] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5aea866-4e60-48b4-be24-5e6b62f1a7c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.888183] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Uploading image 5c8a4151-6d77-440d-8eeb-821791c92e89 {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 888.914143] env[61972]: DEBUG oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 888.914143] env[61972]: value = "vm-294874" [ 888.914143] env[61972]: _type = "VirtualMachine" [ 888.914143] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 888.914480] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b915c377-6d48-445a-8c5b-5269a49a67c7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.922252] env[61972]: DEBUG oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lease: (returnval){ [ 888.922252] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5235563c-ec34-0023-9879-cecaff47eec8" [ 888.922252] env[61972]: _type = "HttpNfcLease" [ 888.922252] env[61972]: } obtained for exporting VM: (result){ [ 888.922252] env[61972]: value = "vm-294874" [ 888.922252] env[61972]: _type = "VirtualMachine" [ 888.922252] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 888.922667] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the lease: (returnval){ [ 888.922667] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5235563c-ec34-0023-9879-cecaff47eec8" [ 888.922667] env[61972]: _type = "HttpNfcLease" [ 888.922667] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 888.931112] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 888.931112] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5235563c-ec34-0023-9879-cecaff47eec8" [ 888.931112] env[61972]: _type = "HttpNfcLease" [ 888.931112] env[61972]: } is initializing. 
{{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 888.961622] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.965753] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.505s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.966089] env[61972]: DEBUG nova.objects.instance [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lazy-loading 'resources' on Instance uuid 667aff7f-57d5-4133-934d-386602a866f8 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.980863] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquiring lock "489f1de0-d1c8-4429-a6f1-24ea885282f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.981385] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "489f1de0-d1c8-4429-a6f1-24ea885282f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.981778] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquiring lock "489f1de0-d1c8-4429-a6f1-24ea885282f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.981993] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "489f1de0-d1c8-4429-a6f1-24ea885282f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.982180] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "489f1de0-d1c8-4429-a6f1-24ea885282f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.985798] env[61972]: INFO nova.compute.manager [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Terminating instance [ 888.988330] env[61972]: INFO nova.scheduler.client.report [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted allocations for instance 3d424523-b45d-4174-ac7a-08fd653e314f [ 889.070683] env[61972]: DEBUG nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 889.088200] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Completed reading data from the image iterator. {{(pid=61972) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 889.088507] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5200c69b-e4e3-f452-ebdf-effd4699e86e/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 889.089367] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf8e4e5-80d7-4375-9171-92e932870bf8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.098056] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5200c69b-e4e3-f452-ebdf-effd4699e86e/disk-0.vmdk is in state: ready. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 889.098239] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5200c69b-e4e3-f452-ebdf-effd4699e86e/disk-0.vmdk. 
{{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 889.098475] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-16e4779f-84ad-4fda-8a7f-d720bae5ff33 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.101796] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 889.102023] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 889.102184] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.102366] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 889.102514] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.102661] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 889.102916] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 889.103091] env[61972]: DEBUG nova.virt.hardware [None 
req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 889.103275] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 889.103449] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 889.103648] env[61972]: DEBUG nova.virt.hardware [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 889.104472] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c1088f-c446-4ba9-94d7-c0d11d2a6c10 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.116021] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0ed1af-bd77-4661-85c5-01cdde18bb34 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.268182] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389328, 'name': Rename_Task, 'duration_secs': 0.199702} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.268522] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 889.268794] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82755aa6-f4f4-4289-8ca3-007964957f0a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.273257] env[61972]: DEBUG nova.compute.manager [req-ad0bf942-942b-44e6-a427-4e4e0d8ab33a req-2e71de6b-7fac-4351-a00d-e97f3442e4fc service nova] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Received event network-vif-deleted-7c33b0c3-d8f5-4df1-8f8a-62ae44204b61 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 889.276384] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 889.276384] env[61972]: value = "task-1389330" [ 889.276384] env[61972]: _type = "Task" [ 889.276384] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.285674] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.327482] env[61972]: INFO nova.compute.manager [-] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Took 1.39 seconds to deallocate network for instance. [ 889.432233] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 889.432233] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5235563c-ec34-0023-9879-cecaff47eec8" [ 889.432233] env[61972]: _type = "HttpNfcLease" [ 889.432233] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 889.433321] env[61972]: DEBUG oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 889.433321] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5235563c-ec34-0023-9879-cecaff47eec8" [ 889.433321] env[61972]: _type = "HttpNfcLease" [ 889.433321] env[61972]: }. 
{{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 889.433830] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d42e25-9fee-499a-a275-447f93240f35 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.444596] env[61972]: DEBUG oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52633252-5181-8af6-86dc-bcefaaa07f1b/disk-0.vmdk from lease info. {{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 889.444894] env[61972]: DEBUG oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52633252-5181-8af6-86dc-bcefaaa07f1b/disk-0.vmdk for reading. {{(pid=61972) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 889.508441] env[61972]: DEBUG nova.compute.manager [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 889.508658] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 889.510505] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5cf44052-f213-45f0-8830-fcf97fb2c42f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "3d424523-b45d-4174-ac7a-08fd653e314f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.215s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.513319] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c963476-617f-4fb0-ba4d-412f3d4d553a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.516232] env[61972]: DEBUG oslo_vmware.rw_handles [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5200c69b-e4e3-f452-ebdf-effd4699e86e/disk-0.vmdk. 
{{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 889.516452] env[61972]: INFO nova.virt.vmwareapi.images [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Downloaded image file data 484207c6-3534-46e0-becb-1716957998ac [ 889.517745] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f666225-4926-4b79-aa42-d800785a21a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.525989] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 889.538614] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0863f708-96bd-4212-a27d-453d43401784 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.543311] env[61972]: DEBUG nova.network.neutron [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Successfully updated port: 8b14120b-00c5-492b-9827-1a2726e53641 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.544481] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e026d7e4-ffd2-4471-8e7c-f2b1096f818d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.550406] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8367e827-6dfe-4e91-abfe-1520dc117477 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.553652] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 889.553652] env[61972]: value = "task-1389331" [ 889.553652] env[61972]: _type = "Task" [ 889.553652] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.563891] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389331, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.599344] env[61972]: INFO nova.virt.vmwareapi.images [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] The imported VM was unregistered [ 889.601919] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Caching image {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 889.602204] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating directory with path [datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.602516] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ab5cf28-c477-4e8a-b3db-47d51b25f4a4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.614063] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Created directory with path [datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.614299] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117/OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117.vmdk to [datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac/484207c6-3534-46e0-becb-1716957998ac.vmdk. {{(pid=61972) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 889.614561] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c0c153f7-291f-426f-9eb8-4a837b85622c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.623895] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 889.623895] env[61972]: value = "task-1389333" [ 889.623895] env[61972]: _type = "Task" [ 889.623895] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.632203] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389333, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.791458] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389330, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.795316] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6038791f-cd77-47b2-ba9a-37df6bf1d834 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.804391] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdba4ebd-63f3-4833-bdbd-46e1606ee340 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.842337] env[61972]: DEBUG oslo_concurrency.lockutils [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.843536] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9873d468-2acb-428c-97bf-ec7cd911d86b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.853704] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e4eab0-a207-43bb-b81a-a2e254c253b2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.869662] env[61972]: DEBUG nova.compute.provider_tree [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.049468] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "refresh_cache-1597e0f2-f67a-406e-9ef0-4d39b353ab0a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.050333] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "refresh_cache-1597e0f2-f67a-406e-9ef0-4d39b353ab0a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.050333] env[61972]: DEBUG nova.network.neutron [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.065552] env[61972]: DEBUG oslo_vmware.api [None 
req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389331, 'name': PowerOffVM_Task, 'duration_secs': 0.21729} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.065806] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.065998] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 890.066272] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a261775-c98a-49bc-bb86-2acec57c499f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.136701] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389333, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.167349] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 890.167780] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 890.168078] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Deleting the datastore file [datastore2] 489f1de0-d1c8-4429-a6f1-24ea885282f3 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.168445] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-495fa050-44d2-4228-b43d-e14f215a0071 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.176122] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for the task: (returnval){ [ 890.176122] env[61972]: value = "task-1389335" [ 890.176122] env[61972]: _type = "Task" [ 890.176122] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.185425] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.286735] env[61972]: DEBUG oslo_vmware.api [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389330, 'name': PowerOnVM_Task, 'duration_secs': 0.579016} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.287119] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.287420] env[61972]: INFO nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Took 9.13 seconds to spawn the instance on the hypervisor. [ 890.287733] env[61972]: DEBUG nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 890.288667] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db5e956-73b7-42b5-bd77-8a5e5753207f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.374723] env[61972]: DEBUG nova.scheduler.client.report [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 890.587154] env[61972]: DEBUG nova.network.neutron [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.638263] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389333, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.685574] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.751170] env[61972]: DEBUG nova.network.neutron [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Updating instance_info_cache with network_info: [{"id": "8b14120b-00c5-492b-9827-1a2726e53641", "address": "fa:16:3e:81:d4:43", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b14120b-00", "ovs_interfaceid": "8b14120b-00c5-492b-9827-1a2726e53641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.815308] env[61972]: INFO nova.compute.manager [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Took 26.93 seconds to build instance. 
[ 890.879774] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.914s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.882439] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.298s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.884306] env[61972]: INFO nova.compute.claims [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.902813] env[61972]: INFO nova.scheduler.client.report [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Deleted allocations for instance 667aff7f-57d5-4133-934d-386602a866f8 [ 891.137155] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389333, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.187397] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389335, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.253778] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "refresh_cache-1597e0f2-f67a-406e-9ef0-4d39b353ab0a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.254184] env[61972]: DEBUG nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Instance network_info: |[{"id": "8b14120b-00c5-492b-9827-1a2726e53641", "address": "fa:16:3e:81:d4:43", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b14120b-00", "ovs_interfaceid": "8b14120b-00c5-492b-9827-1a2726e53641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 891.254637] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:d4:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b14120b-00c5-492b-9827-1a2726e53641', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.263908] env[61972]: DEBUG oslo.service.loopingcall [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.264208] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 891.264530] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b39b591-042f-4fa4-9c98-1ba2acbc3b95 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.285994] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.285994] env[61972]: value = "task-1389336" [ 891.285994] env[61972]: _type = "Task" [ 891.285994] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.294389] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389336, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.302052] env[61972]: DEBUG nova.compute.manager [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Received event network-vif-plugged-8b14120b-00c5-492b-9827-1a2726e53641 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 891.302228] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] Acquiring lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.302510] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] Lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.302712] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] Lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.302916] env[61972]: DEBUG nova.compute.manager [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] No waiting events found dispatching network-vif-plugged-8b14120b-00c5-492b-9827-1a2726e53641 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 891.303140] env[61972]: WARNING nova.compute.manager [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Received unexpected event network-vif-plugged-8b14120b-00c5-492b-9827-1a2726e53641 for instance with vm_state building and task_state spawning. 
[ 891.303319] env[61972]: DEBUG nova.compute.manager [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Received event network-changed-8b14120b-00c5-492b-9827-1a2726e53641 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 891.303478] env[61972]: DEBUG nova.compute.manager [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Refreshing instance network info cache due to event network-changed-8b14120b-00c5-492b-9827-1a2726e53641. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 891.303665] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] Acquiring lock "refresh_cache-1597e0f2-f67a-406e-9ef0-4d39b353ab0a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.303833] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] Acquired lock "refresh_cache-1597e0f2-f67a-406e-9ef0-4d39b353ab0a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.304092] env[61972]: DEBUG nova.network.neutron [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Refreshing network info cache for port 8b14120b-00c5-492b-9827-1a2726e53641 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.315484] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1c5afddc-2a35-496b-9853-4f9334d1255e tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.340s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.413056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-dfa5a3bc-6968-4e2f-93a0-cf8405ad1bb6 tempest-ServerShowV254Test-2085387316 tempest-ServerShowV254Test-2085387316-project-member] Lock "667aff7f-57d5-4133-934d-386602a866f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.218s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.550932] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "c274f675-f45e-49e7-8bf3-582a6977d95c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.551153] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.639344] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389333, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.687671] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.796971] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389336, 'name': CreateVM_Task} progress is 25%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.054064] env[61972]: DEBUG nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 892.134203] env[61972]: DEBUG nova.network.neutron [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Updated VIF entry in instance network info cache for port 8b14120b-00c5-492b-9827-1a2726e53641. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.134701] env[61972]: DEBUG nova.network.neutron [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Updating instance_info_cache with network_info: [{"id": "8b14120b-00c5-492b-9827-1a2726e53641", "address": "fa:16:3e:81:d4:43", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b14120b-00", "ovs_interfaceid": "8b14120b-00c5-492b-9827-1a2726e53641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.144909] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389333, 'name': MoveVirtualDisk_Task} progress is 94%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.198719] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.221135] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164d41fc-daf6-43b4-8070-1a0139050e01 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.229028] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32518c26-0726-48ac-ae74-0c5b675301be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.265921] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932add42-6b72-45bb-a1ad-e68a6e17dc89 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.277931] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b638e5-e785-4c48-a17e-4cb4874c399e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.293840] env[61972]: DEBUG nova.compute.provider_tree [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.304240] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389336, 'name': CreateVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.581483] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.638196] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f6c6ce7-a6cd-4225-bbd9-5707262958c0 req-d67db45a-803c-4669-8f3c-762109e8efff service nova] Releasing lock "refresh_cache-1597e0f2-f67a-406e-9ef0-4d39b353ab0a" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.638715] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389333, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.734159} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.639151] env[61972]: INFO nova.virt.vmwareapi.ds_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117/OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117.vmdk to [datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac/484207c6-3534-46e0-becb-1716957998ac.vmdk. [ 892.639432] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Cleaning up location [datastore2] OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 892.639608] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_efdb85e7-8cde-419d-998b-76f0cffb8117 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.639886] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bae2d64a-d5ab-49bf-b5e1-10c70d529f8f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.646707] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 892.646707] env[61972]: value = "task-1389337" [ 892.646707] env[61972]: _type = "Task" [ 892.646707] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.656183] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389337, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.691897] env[61972]: DEBUG oslo_vmware.api [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Task: {'id': task-1389335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.072165} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.692561] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.694070] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.694508] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.694928] env[61972]: INFO nova.compute.manager [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Took 3.19 seconds to destroy the instance on the hypervisor. [ 892.695704] env[61972]: DEBUG oslo.service.loopingcall [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.695993] env[61972]: DEBUG nova.compute.manager [-] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 892.696111] env[61972]: DEBUG nova.network.neutron [-] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.803747] env[61972]: DEBUG nova.scheduler.client.report [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 892.806731] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389336, 'name': CreateVM_Task, 'duration_secs': 1.039311} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.807469] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.807760] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.807921] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.808256] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 892.808509] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc5f435a-22e4-4b92-b868-5095704a102c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.813418] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 892.813418] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc4f1e-460b-a57b-0c2d-f2cb8bec7cd2" [ 892.813418] env[61972]: _type = "Task" [ 892.813418] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.821846] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc4f1e-460b-a57b-0c2d-f2cb8bec7cd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.156426] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044333} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.156772] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.156772] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac/484207c6-3534-46e0-becb-1716957998ac.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.156953] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac/484207c6-3534-46e0-becb-1716957998ac.vmdk to [datastore2] a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee/a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.157284] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22aeccfd-9071-4fa9-b976-d56d81c631da {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.164077] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 893.164077] env[61972]: value = "task-1389338" [ 893.164077] env[61972]: _type = "Task" [ 893.164077] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.172283] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389338, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.309591] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.309591] env[61972]: DEBUG nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 893.312410] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.591s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.312629] env[61972]: DEBUG nova.objects.instance [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61972) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 893.330023] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc4f1e-460b-a57b-0c2d-f2cb8bec7cd2, 'name': SearchDatastore_Task, 'duration_secs': 0.010506} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.330023] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.330023] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.330023] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.330270] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.330270] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.330270] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-6a901e20-c68c-406e-8e25-fcaaf7991204 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.340639] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.340899] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.345021] env[61972]: DEBUG nova.compute.manager [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received event network-changed-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 893.345021] env[61972]: DEBUG nova.compute.manager [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Refreshing instance network info cache due to event network-changed-89e228e1-2aac-4e05-98ee-5c29dd44f55b. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 893.345021] env[61972]: DEBUG oslo_concurrency.lockutils [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] Acquiring lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.345021] env[61972]: DEBUG oslo_concurrency.lockutils [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] Acquired lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.345021] env[61972]: DEBUG nova.network.neutron [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Refreshing network info cache for port 89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 893.346705] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bacf7ae-7c6b-4b26-b6f2-8d4416826d2b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.354947] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 893.354947] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]527db031-9a34-1f72-4f35-443da0bb6a51" [ 893.354947] env[61972]: _type = "Task" [ 893.354947] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.364693] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]527db031-9a34-1f72-4f35-443da0bb6a51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.455412] env[61972]: DEBUG nova.network.neutron [-] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.676104] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389338, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.821089] env[61972]: DEBUG nova.compute.utils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 893.822624] env[61972]: DEBUG nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 893.822877] env[61972]: DEBUG nova.network.neutron [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 893.877215] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]527db031-9a34-1f72-4f35-443da0bb6a51, 'name': SearchDatastore_Task, 'duration_secs': 0.089114} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.877518] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3f8c74d-a76e-4671-b70e-06dd77ed08b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.883776] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 893.883776] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c7b769-ba00-3a0f-d773-74a10962f41f" [ 893.883776] env[61972]: _type = "Task" [ 893.883776] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.892245] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c7b769-ba00-3a0f-d773-74a10962f41f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.927894] env[61972]: DEBUG nova.policy [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa1cef9829b45f4bbe90e9882b8f8c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57829399c5741c08c30bb60163148b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 893.961779] env[61972]: INFO nova.compute.manager [-] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Took 1.26 seconds to deallocate network for instance. [ 894.177094] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389338, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.327333] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d6b6b257-c1ba-495a-8b68-d0adad46dd1d tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.329576] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.935s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.331305] env[61972]: INFO nova.compute.claims [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.337136] env[61972]: DEBUG nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 894.391237] env[61972]: DEBUG nova.network.neutron [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updated VIF entry in instance network info cache for port 89e228e1-2aac-4e05-98ee-5c29dd44f55b. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 894.391735] env[61972]: DEBUG nova.network.neutron [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [{"id": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "address": "fa:16:3e:68:43:9d", "network": {"id": "8bff1a25-9939-4436-a9bb-c54446b85c9e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-555487388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9266fa0d01664ba4a80ff4068cb9b9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e228e1-2a", "ovs_interfaceid": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.402236] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c7b769-ba00-3a0f-d773-74a10962f41f, 'name': SearchDatastore_Task, 'duration_secs': 0.096108} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.404262] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.404262] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 1597e0f2-f67a-406e-9ef0-4d39b353ab0a/1597e0f2-f67a-406e-9ef0-4d39b353ab0a.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.404262] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06b2c3bc-2637-49b7-9545-549564e5434f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.414032] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 894.414032] env[61972]: value = "task-1389339" [ 894.414032] env[61972]: _type = "Task" [ 894.414032] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.426897] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.469247] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.541944] env[61972]: DEBUG nova.network.neutron [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Successfully created port: c0baab24-8aa5-4d82-9f32-62579ec19c0b {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.677776] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389338, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.894896] env[61972]: DEBUG oslo_concurrency.lockutils [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] Releasing lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.895420] env[61972]: DEBUG nova.compute.manager [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Received event network-vif-deleted-08285af7-59f1-4c6a-acd6-ef8aa2fd1506 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 894.895625] env[61972]: INFO nova.compute.manager [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Neutron deleted interface 08285af7-59f1-4c6a-acd6-ef8aa2fd1506; detaching it from the instance and deleting it from the info cache [ 894.895807] env[61972]: DEBUG nova.network.neutron [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.928887] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.189675] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389338, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.356142] env[61972]: DEBUG nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 895.387544] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.387544] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.387544] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.387733] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.388012] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.388243] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.388478] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.388636] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.388806] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 
tempest-ServersTestJSON-1214410209-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.388970] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.389157] env[61972]: DEBUG nova.virt.hardware [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.391472] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e509e95-6717-4428-bfe7-ea3890175b88 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.400997] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb83647-f2d6-4c79-b0cc-446f4678192e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.409617] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35e65800-b8c3-4804-884e-09465529c543 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.438317] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1f1a2b-35a7-481e-93ae-310179bda0cf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.460312] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.471408] env[61972]: DEBUG nova.compute.manager [req-059ab07f-7ee6-4d1f-b895-237cefd953d2 req-16c78937-0f98-4965-9499-7e54af8df5e7 service nova] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Detach interface failed, port_id=08285af7-59f1-4c6a-acd6-ef8aa2fd1506, reason: Instance 489f1de0-d1c8-4429-a6f1-24ea885282f3 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 895.689854] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389338, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.714641] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99262e94-fe35-46fa-b83d-568d754b1486 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.722689] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e44fa8c-fe4f-4cfa-9252-89a551051718 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.757296] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8cd85f4-d479-4836-8e3e-07cae25c8630 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.765978] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c00992-63ce-4250-9067-c5a2e1df2932 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.781982] env[61972]: DEBUG nova.compute.provider_tree [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.936350] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389339, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.184457] env[61972]: DEBUG nova.compute.manager [req-5812c610-1fc5-4276-af51-e366f5b164fe req-5806b821-036b-4cd1-b9c7-bf617e398475 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Received event network-vif-plugged-c0baab24-8aa5-4d82-9f32-62579ec19c0b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 896.184719] env[61972]: DEBUG oslo_concurrency.lockutils [req-5812c610-1fc5-4276-af51-e366f5b164fe req-5806b821-036b-4cd1-b9c7-bf617e398475 service nova] Acquiring lock "a4e65047-a892-4f18-8a14-0f5de25ce235-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.185237] env[61972]: DEBUG oslo_concurrency.lockutils [req-5812c610-1fc5-4276-af51-e366f5b164fe req-5806b821-036b-4cd1-b9c7-bf617e398475 service nova] Lock "a4e65047-a892-4f18-8a14-0f5de25ce235-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.185587] env[61972]: DEBUG oslo_concurrency.lockutils [req-5812c610-1fc5-4276-af51-e366f5b164fe req-5806b821-036b-4cd1-b9c7-bf617e398475 service nova] Lock "a4e65047-a892-4f18-8a14-0f5de25ce235-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.185874] env[61972]: DEBUG nova.compute.manager [req-5812c610-1fc5-4276-af51-e366f5b164fe req-5806b821-036b-4cd1-b9c7-bf617e398475 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] No waiting events found dispatching network-vif-plugged-c0baab24-8aa5-4d82-9f32-62579ec19c0b {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 896.186386] env[61972]: WARNING nova.compute.manager [req-5812c610-1fc5-4276-af51-e366f5b164fe req-5806b821-036b-4cd1-b9c7-bf617e398475 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Received unexpected event network-vif-plugged-c0baab24-8aa5-4d82-9f32-62579ec19c0b for instance with vm_state building and task_state spawning. [ 896.193754] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389338, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.585516} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.194086] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/484207c6-3534-46e0-becb-1716957998ac/484207c6-3534-46e0-becb-1716957998ac.vmdk to [datastore2] a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee/a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.196396] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3937a84e-c20c-4f48-b450-4ffb50bb2ecf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.228912] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee/a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee.vmdk or device None with type streamOptimized {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.229137] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b8d7a2c-bdd3-4b99-846c-7f98825459a2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.249518] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 896.249518] env[61972]: value = "task-1389340" [ 896.249518] env[61972]: _type = "Task" [ 896.249518] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.259861] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389340, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.285768] env[61972]: DEBUG nova.scheduler.client.report [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 896.294578] env[61972]: DEBUG nova.network.neutron [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Successfully updated port: c0baab24-8aa5-4d82-9f32-62579ec19c0b {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.439019] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389339, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.730772} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.439019] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 1597e0f2-f67a-406e-9ef0-4d39b353ab0a/1597e0f2-f67a-406e-9ef0-4d39b353ab0a.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.439019] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.439019] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4f3551f-58b7-4095-8fe2-e1626c5c4e66 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.444593] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 896.444593] env[61972]: value = "task-1389341" [ 896.444593] env[61972]: _type = "Task" [ 896.444593] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.453088] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389341, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.761196] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389340, 'name': ReconfigVM_Task, 'duration_secs': 0.447275} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.763064] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Reconfigured VM instance instance-00000051 to attach disk [datastore2] a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee/a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee.vmdk or device None with type streamOptimized {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.768290] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3119b5d-e69a-4389-8c47-c809503de503 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.774116] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 896.774116] env[61972]: value = "task-1389342" [ 896.774116] env[61972]: _type = "Task" [ 896.774116] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.783041] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389342, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.796340] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.796340] env[61972]: DEBUG nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 896.800783] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.458s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.800783] env[61972]: DEBUG nova.objects.instance [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lazy-loading 'resources' on Instance uuid 942b00ba-a615-452d-a0c1-633d48d73fd4 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.800783] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "refresh_cache-a4e65047-a892-4f18-8a14-0f5de25ce235" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.800783] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "refresh_cache-a4e65047-a892-4f18-8a14-0f5de25ce235" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.800783] env[61972]: DEBUG nova.network.neutron [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.957260] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389341, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069528} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.958035] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 896.959803] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c29e87-3974-4ad5-831b-2f4f9bf0092a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.989020] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 1597e0f2-f67a-406e-9ef0-4d39b353ab0a/1597e0f2-f67a-406e-9ef0-4d39b353ab0a.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.989020] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11108ae3-d0a8-41f1-8c72-8dbfb6ec168f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.014183] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 897.014183] env[61972]: value = "task-1389343" [ 897.014183] env[61972]: _type = "Task" [ 897.014183] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.021309] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389343, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.286276] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389342, 'name': Rename_Task, 'duration_secs': 0.175157} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.286821] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.287213] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fefbfdde-eb7f-4406-9d6b-0a2b2d66c145 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.294860] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 897.294860] env[61972]: value = "task-1389344" [ 897.294860] env[61972]: _type = "Task" [ 897.294860] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.306039] env[61972]: DEBUG nova.compute.utils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.313410] env[61972]: DEBUG nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 897.313611] env[61972]: DEBUG nova.network.neutron [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 897.315321] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389344, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.374742] env[61972]: DEBUG nova.network.neutron [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.421117] env[61972]: DEBUG nova.policy [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cefef67f4ae0451aaa108df20aa7a3db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a685a448ff041db8bc49b4429688e34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 897.521959] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389343, 'name': ReconfigVM_Task, 'duration_secs': 0.501199} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.524941] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 1597e0f2-f67a-406e-9ef0-4d39b353ab0a/1597e0f2-f67a-406e-9ef0-4d39b353ab0a.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.527644] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0043d1a9-e92b-4e11-bcea-d7cedef444f0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.535516] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 897.535516] env[61972]: value = "task-1389345" [ 897.535516] env[61972]: _type = "Task" [ 897.535516] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.543886] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389345, 'name': Rename_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.634250] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729f3547-3ee7-41bf-a936-9b310b9dcdd1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.643289] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e34b60-d089-4415-b464-18d1074f7c26 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.648307] env[61972]: DEBUG nova.network.neutron [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Updating instance_info_cache with network_info: [{"id": "c0baab24-8aa5-4d82-9f32-62579ec19c0b", "address": "fa:16:3e:e0:db:e0", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0baab24-8a", "ovs_interfaceid": "c0baab24-8aa5-4d82-9f32-62579ec19c0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.679567] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327e72f4-6679-456f-96bc-597139e1d8f9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.690474] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccb103b-1e53-42d6-9e75-871c533e6093 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.707314] env[61972]: DEBUG nova.compute.provider_tree [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.808112] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389344, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.817942] env[61972]: DEBUG nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 897.832033] env[61972]: DEBUG nova.network.neutron [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Successfully created port: dd99d79a-cd6c-477b-88f4-45e9d019f331 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.051156] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389345, 'name': Rename_Task, 'duration_secs': 0.207673} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.051156] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.051156] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a382fc27-8582-4343-9227-4938e8a2843f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.058395] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 898.058395] env[61972]: value = "task-1389346" [ 898.058395] env[61972]: _type = "Task" [ 898.058395] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.068782] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389346, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.154138] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "refresh_cache-a4e65047-a892-4f18-8a14-0f5de25ce235" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.154138] env[61972]: DEBUG nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Instance network_info: |[{"id": "c0baab24-8aa5-4d82-9f32-62579ec19c0b", "address": "fa:16:3e:e0:db:e0", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0baab24-8a", "ovs_interfaceid": "c0baab24-8aa5-4d82-9f32-62579ec19c0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 898.154531] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:db:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0baab24-8aa5-4d82-9f32-62579ec19c0b', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.160877] env[61972]: DEBUG oslo.service.loopingcall [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.161318] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.161690] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0206b0e2-a25d-4c8c-b445-3393fccfc9fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.183591] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.183591] env[61972]: value = "task-1389347" [ 898.183591] env[61972]: _type = "Task" [ 898.183591] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.192257] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389347, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.213020] env[61972]: DEBUG nova.scheduler.client.report [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 898.248865] env[61972]: DEBUG nova.compute.manager [req-d9b779ed-ca90-4350-a3d7-fe2314551e54 req-e9f2cd91-3bde-4cf2-891f-193cfd3fc3b1 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Received event network-changed-c0baab24-8aa5-4d82-9f32-62579ec19c0b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 898.249940] env[61972]: DEBUG nova.compute.manager [req-d9b779ed-ca90-4350-a3d7-fe2314551e54 req-e9f2cd91-3bde-4cf2-891f-193cfd3fc3b1 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Refreshing instance network info cache due to event network-changed-c0baab24-8aa5-4d82-9f32-62579ec19c0b. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 898.251716] env[61972]: DEBUG oslo_concurrency.lockutils [req-d9b779ed-ca90-4350-a3d7-fe2314551e54 req-e9f2cd91-3bde-4cf2-891f-193cfd3fc3b1 service nova] Acquiring lock "refresh_cache-a4e65047-a892-4f18-8a14-0f5de25ce235" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.251716] env[61972]: DEBUG oslo_concurrency.lockutils [req-d9b779ed-ca90-4350-a3d7-fe2314551e54 req-e9f2cd91-3bde-4cf2-891f-193cfd3fc3b1 service nova] Acquired lock "refresh_cache-a4e65047-a892-4f18-8a14-0f5de25ce235" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.251716] env[61972]: DEBUG nova.network.neutron [req-d9b779ed-ca90-4350-a3d7-fe2314551e54 req-e9f2cd91-3bde-4cf2-891f-193cfd3fc3b1 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Refreshing network info cache for port c0baab24-8aa5-4d82-9f32-62579ec19c0b {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.307159] env[61972]: DEBUG oslo_vmware.api [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389344, 'name': PowerOnVM_Task, 'duration_secs': 0.656302} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.307484] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.307987] env[61972]: INFO nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Took 14.74 seconds to spawn the instance on the hypervisor. [ 898.307987] env[61972]: DEBUG nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 898.309071] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b84734a-7848-4e6b-8e31-13d4117c744e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.569298] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389346, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.693248] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389347, 'name': CreateVM_Task, 'duration_secs': 0.503717} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.693419] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 898.694183] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.694361] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.694686] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 898.694949] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3761a09-0715-4dc2-99f6-4bbe27ffcf0b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.700032] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 898.700032] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5209d92b-8cf1-819e-7465-33679c3ca260" [ 898.700032] env[61972]: _type = "Task" [ 898.700032] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.707830] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5209d92b-8cf1-819e-7465-33679c3ca260, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.718245] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.921s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.720475] env[61972]: DEBUG oslo_concurrency.lockutils [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.878s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.720724] env[61972]: DEBUG nova.objects.instance [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lazy-loading 'resources' on Instance uuid 9a0463a0-dc96-41b1-8415-22011644ac0d {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.760307] env[61972]: INFO nova.scheduler.client.report [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted allocations for instance 942b00ba-a615-452d-a0c1-633d48d73fd4 [ 898.828146] env[61972]: DEBUG nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 898.831054] env[61972]: INFO nova.compute.manager [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Took 34.23 seconds to build instance. 
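The entries above repeat two patterns: oslo_vmware task handling ("Waiting for the task: (returnval){...}", "Task: {...} progress is N%.", "completed successfully", emitted by wait_for_task/_poll_task in oslo_vmware/api.py) and oslo_concurrency lock handling ("Acquiring lock ...", "acquired ... waited N s", '"released" ... held N s', emitted by the inner wrapper in lockutils.py). The sketch below is illustrative only: get_task_info and claim_resources are made-up stand-ins for this note, not Nova or oslo.vmware code, and the real driver delegates the polling to the session's wait_for_task rather than an explicit loop like this.

# Illustrative sketch of the two logging patterns seen above.
# `get_task_info` is a hypothetical callable returning e.g.
# {'state': 'running', 'progress': 89}; it is NOT a real oslo.vmware API.
import time

from oslo_concurrency import lockutils


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a long-running task until it reaches a terminal state."""
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            # corresponds to the "... completed successfully." log lines
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error_message', 'task failed'))
        # corresponds to the "Task: {...} progress is N%." log lines
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(poll_interval)


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    """Stand-in for work done while holding the "compute_resources" lock;
    the decorator produces the acquire/release lines seen in the log."""
    print('claimed resources for %s' % instance_uuid)

In the log itself the same poll sequence appears for CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task and SearchDatastore_Task, while the "compute_resources" acquire/release pairs bracket the resource tracker's instance_claim and update_usage calls recorded above.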
[ 898.858472] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.858727] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.858891] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.859129] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.860009] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.860009] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.860009] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.860009] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.860009] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 
tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.860282] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.860443] env[61972]: DEBUG nova.virt.hardware [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.861687] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8679d3-f230-4501-91d7-bc49d2d08c03 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.870766] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae629637-a37b-494e-bd67-1be578677804 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.069799] env[61972]: DEBUG oslo_vmware.api [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389346, 'name': PowerOnVM_Task, 'duration_secs': 0.806722} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.070308] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.070641] env[61972]: INFO nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Took 10.00 seconds to spawn the instance on the hypervisor. [ 899.070947] env[61972]: DEBUG nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 899.071900] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1e94b2-b4c0-42ad-a0e0-100918814d19 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.211230] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5209d92b-8cf1-819e-7465-33679c3ca260, 'name': SearchDatastore_Task, 'duration_secs': 0.009942} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.211828] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.211828] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.212057] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.212215] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.212558] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.214881] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21a7f16d-fb72-4575-85cf-b056930fb1d8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.223914] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.224192] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.224899] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cafe4028-4073-4144-9a58-d88fb15f7980 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.232901] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 899.232901] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d073e3-9380-ea64-4a27-080897f52897" [ 899.232901] env[61972]: _type = "Task" [ 899.232901] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.240944] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d073e3-9380-ea64-4a27-080897f52897, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.269233] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c22933a7-c094-4c35-996f-089f957d1f7f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "942b00ba-a615-452d-a0c1-633d48d73fd4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.468s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.282309] env[61972]: DEBUG nova.network.neutron [req-d9b779ed-ca90-4350-a3d7-fe2314551e54 req-e9f2cd91-3bde-4cf2-891f-193cfd3fc3b1 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Updated VIF entry in instance network info cache for port c0baab24-8aa5-4d82-9f32-62579ec19c0b. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.282510] env[61972]: DEBUG nova.network.neutron [req-d9b779ed-ca90-4350-a3d7-fe2314551e54 req-e9f2cd91-3bde-4cf2-891f-193cfd3fc3b1 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Updating instance_info_cache with network_info: [{"id": "c0baab24-8aa5-4d82-9f32-62579ec19c0b", "address": "fa:16:3e:e0:db:e0", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0baab24-8a", "ovs_interfaceid": "c0baab24-8aa5-4d82-9f32-62579ec19c0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.334585] env[61972]: DEBUG oslo_concurrency.lockutils [None req-37ea462f-cc9b-475d-ab14-6e68b5b464e7 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.824s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.469452] env[61972]: DEBUG nova.network.neutron [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Successfully updated port: dd99d79a-cd6c-477b-88f4-45e9d019f331 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.297476] env[61972]: DEBUG oslo_concurrency.lockutils [req-d9b779ed-ca90-4350-a3d7-fe2314551e54 req-e9f2cd91-3bde-4cf2-891f-193cfd3fc3b1 service nova] Releasing lock "refresh_cache-a4e65047-a892-4f18-8a14-0f5de25ce235" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.298851] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.298851] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
900.298851] env[61972]: DEBUG nova.network.neutron [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 900.307379] env[61972]: DEBUG nova.compute.manager [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Received event network-vif-plugged-dd99d79a-cd6c-477b-88f4-45e9d019f331 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 900.307379] env[61972]: DEBUG oslo_concurrency.lockutils [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] Acquiring lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.307379] env[61972]: DEBUG oslo_concurrency.lockutils [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.307379] env[61972]: DEBUG oslo_concurrency.lockutils [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.307616] env[61972]: DEBUG nova.compute.manager [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] No waiting events found dispatching network-vif-plugged-dd99d79a-cd6c-477b-88f4-45e9d019f331 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 900.309548] env[61972]: WARNING nova.compute.manager [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Received unexpected event network-vif-plugged-dd99d79a-cd6c-477b-88f4-45e9d019f331 for instance with vm_state building and task_state spawning. [ 900.309548] env[61972]: DEBUG nova.compute.manager [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Received event network-changed-dd99d79a-cd6c-477b-88f4-45e9d019f331 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 900.309548] env[61972]: DEBUG nova.compute.manager [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Refreshing instance network info cache due to event network-changed-dd99d79a-cd6c-477b-88f4-45e9d019f331. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 900.309548] env[61972]: DEBUG oslo_concurrency.lockutils [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] Acquiring lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.318697] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquiring lock "12a1a1ee-9aa1-4dda-9276-68492718e404" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.319181] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "12a1a1ee-9aa1-4dda-9276-68492718e404" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.321161] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b69c857-8c21-4cff-aa61-f4e6a93e498c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.325185] env[61972]: INFO nova.compute.manager [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Took 28.18 seconds to build instance. [ 900.339036] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16cb3dc-7712-4fc0-9d82-f41680cd7686 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.341865] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d073e3-9380-ea64-4a27-080897f52897, 'name': SearchDatastore_Task, 'duration_secs': 0.009586} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.344455] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-841495c8-25a0-447c-8710-de4646a2f730 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.373323] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649184f0-443e-4cbd-9421-f64ef0e8e97d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.378048] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 900.378048] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]526b52a1-01cd-fd21-a7ba-8bb19cd8d518" [ 900.378048] env[61972]: _type = "Task" [ 900.378048] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.386164] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a606c2-8ba6-4202-a7d9-ea2399200d09 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.394267] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]526b52a1-01cd-fd21-a7ba-8bb19cd8d518, 'name': SearchDatastore_Task, 'duration_secs': 0.009367} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.394930] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.395248] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] a4e65047-a892-4f18-8a14-0f5de25ce235/a4e65047-a892-4f18-8a14-0f5de25ce235.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.395803] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20c93a05-37a4-4b85-abaa-12d100f884e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.407911] env[61972]: DEBUG nova.compute.provider_tree [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.414067] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 900.414067] env[61972]: value = "task-1389348" [ 900.414067] env[61972]: _type = "Task" [ 900.414067] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.422542] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389348, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.812532] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.813040] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.813218] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.813513] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.813603] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.818013] env[61972]: INFO nova.compute.manager [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Terminating instance [ 900.827286] env[61972]: DEBUG nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 900.831156] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2c946a3-b84f-4e3a-8a0b-8f64f6b1a5c4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.690s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.854301] env[61972]: DEBUG nova.network.neutron [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.879710] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.880011] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.880279] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.880405] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.880733] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.884898] env[61972]: INFO nova.compute.manager [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Terminating instance [ 900.910886] 
env[61972]: DEBUG nova.scheduler.client.report [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 900.924405] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389348, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460335} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.927792] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] a4e65047-a892-4f18-8a14-0f5de25ce235/a4e65047-a892-4f18-8a14-0f5de25ce235.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 900.928186] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 900.928502] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8cde762a-b815-4bf7-b585-97cf2fed287d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.937438] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 900.937438] env[61972]: value = "task-1389349" [ 900.937438] env[61972]: _type = "Task" [ 900.937438] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.946734] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389349, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.001496] env[61972]: DEBUG nova.network.neutron [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance_info_cache with network_info: [{"id": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "address": "fa:16:3e:c8:26:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd99d79a-cd", "ovs_interfaceid": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.323048] env[61972]: DEBUG nova.compute.manager [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 901.323048] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 901.324022] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3073e3-2dc4-421f-8291-db1344aba5b8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.332178] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.332460] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a204303-90df-4e86-b180-415aab995690 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.342296] env[61972]: DEBUG oslo_vmware.api [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 901.342296] env[61972]: value = "task-1389350" [ 901.342296] env[61972]: _type = "Task" [ 901.342296] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.354077] env[61972]: DEBUG oslo_vmware.api [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.358588] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.389626] env[61972]: DEBUG nova.compute.manager [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 901.389936] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 901.390930] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec38fae-0994-488f-9a2e-6992c680d4cc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.399433] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.399724] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9d634b4-c32f-4348-b194-447d07e3f139 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.406612] env[61972]: DEBUG oslo_vmware.api [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 901.406612] env[61972]: value = "task-1389351" [ 901.406612] env[61972]: _type = "Task" [ 901.406612] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.415271] env[61972]: DEBUG oslo_vmware.api [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389351, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.416055] env[61972]: DEBUG oslo_concurrency.lockutils [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.696s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.418211] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.837s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.419696] env[61972]: INFO nova.compute.claims [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 901.445729] env[61972]: INFO nova.scheduler.client.report [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Deleted allocations for instance 9a0463a0-dc96-41b1-8415-22011644ac0d [ 901.449949] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389349, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126824} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.450736] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 901.453790] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efa40ab-8569-4324-a7a8-57b6fc93dcdc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.480034] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] a4e65047-a892-4f18-8a14-0f5de25ce235/a4e65047-a892-4f18-8a14-0f5de25ce235.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.481236] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd930571-0416-4642-90b7-b91ae9a06cbf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.502703] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 901.502703] env[61972]: value = "task-1389352" [ 901.502703] env[61972]: _type = "Task" [ 901.502703] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.506976] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.507340] env[61972]: DEBUG nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Instance network_info: |[{"id": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "address": "fa:16:3e:c8:26:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd99d79a-cd", "ovs_interfaceid": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 901.507647] env[61972]: DEBUG oslo_concurrency.lockutils [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] Acquired lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.507827] env[61972]: DEBUG nova.network.neutron [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Refreshing network info cache for port dd99d79a-cd6c-477b-88f4-45e9d019f331 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.509025] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:26:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd99d79a-cd6c-477b-88f4-45e9d019f331', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.516719] env[61972]: DEBUG oslo.service.loopingcall [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 
tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.520202] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.524456] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcff4cec-c6c9-45aa-8e14-51587955cde7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.540070] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389352, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.545364] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.545364] env[61972]: value = "task-1389353" [ 901.545364] env[61972]: _type = "Task" [ 901.545364] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.554565] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389353, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.759034] env[61972]: DEBUG nova.network.neutron [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updated VIF entry in instance network info cache for port dd99d79a-cd6c-477b-88f4-45e9d019f331. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 901.759593] env[61972]: DEBUG nova.network.neutron [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance_info_cache with network_info: [{"id": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "address": "fa:16:3e:c8:26:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd99d79a-cd", "ovs_interfaceid": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.853599] env[61972]: DEBUG oslo_vmware.api [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389350, 'name': PowerOffVM_Task, 'duration_secs': 0.287204} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.854611] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.854611] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 901.854611] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-359a1fbe-9436-4cd2-8dd1-b1bdd67129a7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.917312] env[61972]: DEBUG oslo_vmware.api [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389351, 'name': PowerOffVM_Task, 'duration_secs': 0.299739} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.917517] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.917688] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 901.918576] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6480ebfc-bcba-4f95-9d31-29e77851a31c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.920802] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 901.920999] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 901.921199] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleting the datastore file [datastore2] a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.922246] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bc33ac0-c3d4-4eb3-8eee-867990ff5cb2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.930646] env[61972]: DEBUG oslo_vmware.api [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 901.930646] env[61972]: value = "task-1389356" [ 901.930646] env[61972]: _type = "Task" [ 901.930646] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.940236] env[61972]: DEBUG oslo_vmware.api [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389356, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.960376] env[61972]: DEBUG oslo_concurrency.lockutils [None req-44ea682c-76aa-4be5-9d20-6527e05554d1 tempest-ServerAddressesTestJSON-2077627599 tempest-ServerAddressesTestJSON-2077627599-project-member] Lock "9a0463a0-dc96-41b1-8415-22011644ac0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.698s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.008506] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.008506] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.008711] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleting the datastore file [datastore2] 1597e0f2-f67a-406e-9ef0-4d39b353ab0a {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.009684] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7b800a5-9a74-49e0-bf54-82a186079ebd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.014957] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389352, 'name': ReconfigVM_Task, 'duration_secs': 0.47447} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.015693] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Reconfigured VM instance instance-00000053 to attach disk [datastore1] a4e65047-a892-4f18-8a14-0f5de25ce235/a4e65047-a892-4f18-8a14-0f5de25ce235.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.016422] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6c1203c-e87b-4758-8000-4230b64f05b9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.020735] env[61972]: DEBUG oslo_vmware.api [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 902.020735] env[61972]: value = "task-1389357" [ 902.020735] env[61972]: _type = "Task" [ 902.020735] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.024974] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 902.024974] env[61972]: value = "task-1389358" [ 902.024974] env[61972]: _type = "Task" [ 902.024974] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.031816] env[61972]: DEBUG oslo_vmware.api [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389357, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.037958] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389358, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.054105] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389353, 'name': CreateVM_Task, 'duration_secs': 0.467005} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.054297] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.055066] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.055310] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.055683] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.055983] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7550cba-38c7-419a-9b14-c5961ceeccb8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.060923] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 902.060923] env[61972]: value = 
"session[52a9d73d-5959-3000-f45d-05308a20e7d5]52694062-d675-c48a-99e2-4db1bdd17ade" [ 902.060923] env[61972]: _type = "Task" [ 902.060923] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.069880] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52694062-d675-c48a-99e2-4db1bdd17ade, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.263231] env[61972]: DEBUG oslo_concurrency.lockutils [req-7afefe22-85e1-4d3f-bce4-574f9ac21856 req-58a2e537-32da-49ef-b5a0-862c0d39e799 service nova] Releasing lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.441587] env[61972]: DEBUG oslo_vmware.api [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211681} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.441908] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.442042] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.442226] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.442397] env[61972]: INFO nova.compute.manager [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Took 1.12 seconds to destroy the instance on the hypervisor. [ 902.442638] env[61972]: DEBUG oslo.service.loopingcall [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 902.442855] env[61972]: DEBUG nova.compute.manager [-] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 902.442954] env[61972]: DEBUG nova.network.neutron [-] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 902.535275] env[61972]: DEBUG oslo_vmware.api [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389357, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231574} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.536089] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.536215] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.536394] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.536602] env[61972]: INFO nova.compute.manager [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 902.536851] env[61972]: DEBUG oslo.service.loopingcall [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 902.537617] env[61972]: DEBUG nova.compute.manager [-] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 902.537617] env[61972]: DEBUG nova.network.neutron [-] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 902.541549] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389358, 'name': Rename_Task, 'duration_secs': 0.180676} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.542539] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.542838] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19834c9d-75ad-4393-936b-dad0087bac01 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.552228] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 902.552228] env[61972]: value = "task-1389359" [ 902.552228] env[61972]: _type = "Task" [ 902.552228] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.558608] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389359, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.576649] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52694062-d675-c48a-99e2-4db1bdd17ade, 'name': SearchDatastore_Task, 'duration_secs': 0.012305} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.577040] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.577324] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.577763] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.577763] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.578033] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.580178] env[61972]: DEBUG oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52633252-5181-8af6-86dc-bcefaaa07f1b/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 902.580458] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b928334-74dc-45d1-8d9f-b95a84bcf594 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.583204] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f48331e-d25d-4785-8057-6db7fbd374ba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.592661] env[61972]: DEBUG oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52633252-5181-8af6-86dc-bcefaaa07f1b/disk-0.vmdk is in state: ready. 
{{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 902.592876] env[61972]: ERROR oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52633252-5181-8af6-86dc-bcefaaa07f1b/disk-0.vmdk due to incomplete transfer. [ 902.593129] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e5ba5c2f-a736-4717-b12c-17e0bc810c63 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.595549] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.595734] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 902.596752] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8acbbd5a-38fc-4c8e-9436-903ccac3fb45 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.602214] env[61972]: DEBUG oslo_vmware.rw_handles [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52633252-5181-8af6-86dc-bcefaaa07f1b/disk-0.vmdk. 
{{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 902.602479] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Uploaded image 5c8a4151-6d77-440d-8eeb-821791c92e89 to the Glance image server {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 902.604288] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Destroying the VM {{(pid=61972) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 902.605551] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e1b1b5f6-dabb-433e-9113-0530bc6f630b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.610697] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 902.610697] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5210543f-a6db-b3cb-a8b5-9ba8714bc557" [ 902.610697] env[61972]: _type = "Task" [ 902.610697] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.615919] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 902.615919] env[61972]: value = "task-1389360" [ 902.615919] env[61972]: _type = "Task" [ 902.615919] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.624129] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5210543f-a6db-b3cb-a8b5-9ba8714bc557, 'name': SearchDatastore_Task, 'duration_secs': 0.008088} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.625411] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4c6aca4-c06e-44a3-850e-211b63dfec47 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.630795] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389360, 'name': Destroy_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.635200] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 902.635200] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52224402-d8e8-d679-c495-6698fefe7283" [ 902.635200] env[61972]: _type = "Task" [ 902.635200] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.642688] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52224402-d8e8-d679-c495-6698fefe7283, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.694074] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869b1397-5940-4d0b-b012-fad276f0b4ff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.702233] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfcd65e-fd2d-43d9-b7cf-820e8e5e9270 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.735034] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6759b00e-bc2a-4f18-be07-696be12b2b42 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.742791] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1b25bd-f1b2-42f3-a673-1eef6a3115de {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.757328] env[61972]: DEBUG nova.compute.provider_tree [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 902.843179] env[61972]: DEBUG nova.compute.manager [req-b4262706-fa59-4cee-99bc-2cfe59deb04d req-374b7372-2586-4779-a21e-62461b20a8c1 service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Received event network-vif-deleted-635a0344-5f1c-4ac7-be41-f83183d9145d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 902.843722] env[61972]: INFO nova.compute.manager [req-b4262706-fa59-4cee-99bc-2cfe59deb04d req-374b7372-2586-4779-a21e-62461b20a8c1 service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Neutron deleted interface 635a0344-5f1c-4ac7-be41-f83183d9145d; detaching it from the instance and 
deleting it from the info cache [ 902.844123] env[61972]: DEBUG nova.network.neutron [req-b4262706-fa59-4cee-99bc-2cfe59deb04d req-374b7372-2586-4779-a21e-62461b20a8c1 service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.060760] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389359, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.126752] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389360, 'name': Destroy_Task, 'duration_secs': 0.440203} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.127294] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Destroyed the VM [ 903.127644] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Deleting Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 903.128017] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9fce6d52-11a5-4d56-90df-17fe1abc78dd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.135122] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 903.135122] env[61972]: value = "task-1389361" [ 903.135122] env[61972]: _type = "Task" [ 903.135122] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.150940] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52224402-d8e8-d679-c495-6698fefe7283, 'name': SearchDatastore_Task, 'duration_secs': 0.009012} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.156873] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.157479] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 84e07f61-2111-43cb-93a2-9cb47ac52503/84e07f61-2111-43cb-93a2-9cb47ac52503.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 903.158181] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389361, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.158563] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f465951-d2e6-4397-be0a-f24b24349faf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.166724] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 903.166724] env[61972]: value = "task-1389362" [ 903.166724] env[61972]: _type = "Task" [ 903.166724] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.179398] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.283928] env[61972]: ERROR nova.scheduler.client.report [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [req-a4086ef7-d6a6-422d-bf90-97a9db4613bc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f34b92c-91e8-4983-ae34-7426fcec3157. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a4086ef7-d6a6-422d-bf90-97a9db4613bc"}]} [ 903.303895] env[61972]: DEBUG nova.scheduler.client.report [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Refreshing inventories for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 903.321590] env[61972]: DEBUG nova.network.neutron [-] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.324632] env[61972]: DEBUG nova.scheduler.client.report [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Updating ProviderTree inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 903.324978] env[61972]: DEBUG nova.compute.provider_tree [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 903.345135] env[61972]: DEBUG nova.scheduler.client.report [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Refreshing aggregate associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, aggregates: None {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 903.346705] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0045620b-4b29-4097-9771-ea64a9d7eacd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.359082] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b5bb6d-d522-4770-8b77-412082d01667 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.371064] env[61972]: DEBUG nova.scheduler.client.report [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 
tempest-DeleteServersTestJSON-1344274745-project-member] Refreshing trait associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 903.398774] env[61972]: DEBUG nova.compute.manager [req-b4262706-fa59-4cee-99bc-2cfe59deb04d req-374b7372-2586-4779-a21e-62461b20a8c1 service nova] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Detach interface failed, port_id=635a0344-5f1c-4ac7-be41-f83183d9145d, reason: Instance a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 903.407259] env[61972]: DEBUG nova.network.neutron [-] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.562786] env[61972]: DEBUG oslo_vmware.api [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389359, 'name': PowerOnVM_Task, 'duration_secs': 0.575191} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.565701] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.565997] env[61972]: INFO nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Took 8.21 seconds to spawn the instance on the hypervisor. [ 903.566213] env[61972]: DEBUG nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 903.567307] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d043dfa7-75a1-46d1-b061-5811b2a286c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.648271] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389361, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.650142] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cdf0e9-30a4-4900-b27c-b66582c67201 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.657682] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7538fe4a-5120-4898-b9d8-ef5d86f178e0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.691635] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b316c7-9cf3-4203-9b0d-0fc000969c08 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.700469] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526372} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.702898] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 84e07f61-2111-43cb-93a2-9cb47ac52503/84e07f61-2111-43cb-93a2-9cb47ac52503.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 903.703264] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 903.703589] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96139e0a-d4ff-4564-a43a-bd8b64ae9b47 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.706539] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d319a752-f886-4a28-a6e8-5fa9e1e4e6b0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.722154] env[61972]: DEBUG nova.compute.provider_tree [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 903.725044] env[61972]: DEBUG 
oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 903.725044] env[61972]: value = "task-1389363" [ 903.725044] env[61972]: _type = "Task" [ 903.725044] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.733494] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389363, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.828215] env[61972]: INFO nova.compute.manager [-] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Took 1.39 seconds to deallocate network for instance. [ 903.910816] env[61972]: INFO nova.compute.manager [-] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Took 1.37 seconds to deallocate network for instance. [ 904.085015] env[61972]: INFO nova.compute.manager [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Took 26.53 seconds to build instance. [ 904.149610] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389361, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.244823] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077074} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.245205] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 904.246119] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526432b0-2ab7-4912-addc-04b594f25c9c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.275306] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 84e07f61-2111-43cb-93a2-9cb47ac52503/84e07f61-2111-43cb-93a2-9cb47ac52503.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 904.276487] env[61972]: DEBUG nova.scheduler.client.report [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Updated inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 904.276745] env[61972]: DEBUG nova.compute.provider_tree [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Updating resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 generation from 95 to 96 during operation: update_inventory {{(pid=61972) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 904.277022] env[61972]: DEBUG nova.compute.provider_tree [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 904.280468] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8ed7992-a572-4732-920f-c04791026ace {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.300679] env[61972]: DEBUG oslo_vmware.api [None 
req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 904.300679] env[61972]: value = "task-1389364" [ 904.300679] env[61972]: _type = "Task" [ 904.300679] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.312118] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389364, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.334851] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.422334] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.590800] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b9f9789b-c017-461b-8ae4-83f135ba5d78 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "a4e65047-a892-4f18-8a14-0f5de25ce235" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.039s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.649322] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389361, 'name': RemoveSnapshot_Task} progress is 98%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.676331] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "a4e65047-a892-4f18-8a14-0f5de25ce235" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.676630] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "a4e65047-a892-4f18-8a14-0f5de25ce235" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.676846] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "a4e65047-a892-4f18-8a14-0f5de25ce235-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.677201] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "a4e65047-a892-4f18-8a14-0f5de25ce235-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.677381] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "a4e65047-a892-4f18-8a14-0f5de25ce235-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.680184] env[61972]: INFO nova.compute.manager [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Terminating instance [ 904.797049] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.379s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.797634] env[61972]: DEBUG nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 904.800254] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.331s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.800479] env[61972]: DEBUG nova.objects.instance [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lazy-loading 'resources' on Instance uuid 489f1de0-d1c8-4429-a6f1-24ea885282f3 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.813791] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389364, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.867409] env[61972]: DEBUG nova.compute.manager [req-a1664fc5-4b2e-498d-a25e-57e7d289d642 req-0883e7d8-1131-4984-bc5d-d5020b05be87 service nova] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Received event network-vif-deleted-8b14120b-00c5-492b-9827-1a2726e53641 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 905.149816] env[61972]: DEBUG oslo_vmware.api [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389361, 'name': RemoveSnapshot_Task, 'duration_secs': 1.771454} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.150148] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Deleted Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 905.150675] env[61972]: INFO nova.compute.manager [None req-de60074b-7a93-497d-a50e-0b3d083eb2d0 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Took 20.89 seconds to snapshot the instance on the hypervisor. [ 905.186804] env[61972]: DEBUG nova.compute.manager [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 905.187038] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.188093] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28a90cc-f9d6-4172-bb9b-42ddd9e8670d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.196270] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.196503] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70f41c66-820f-47c1-9d2e-4f55c7e81136 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.206049] env[61972]: DEBUG oslo_vmware.api [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 905.206049] env[61972]: value = "task-1389365" [ 905.206049] env[61972]: _type = "Task" [ 905.206049] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.213865] env[61972]: DEBUG oslo_vmware.api [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.304200] env[61972]: DEBUG nova.compute.utils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.311935] env[61972]: DEBUG nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 905.312133] env[61972]: DEBUG nova.network.neutron [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 905.323346] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389364, 'name': ReconfigVM_Task, 'duration_secs': 1.00011} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.324202] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 84e07f61-2111-43cb-93a2-9cb47ac52503/84e07f61-2111-43cb-93a2-9cb47ac52503.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.324879] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6655bd22-66ae-4633-8bf1-f743207bc033 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.331931] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 905.331931] env[61972]: value = "task-1389366" [ 905.331931] env[61972]: _type = "Task" [ 905.331931] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.345231] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389366, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.359199] env[61972]: DEBUG nova.policy [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc3cd61498bc4f858a47a72f02466b3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3c052a272742808be2bcdc71d8f62f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 905.549065] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9e4f83-23a9-44e5-b38f-997461d9ae51 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.556191] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb33f44-d0e6-433f-947a-165e74805864 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.589808] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457b2d0f-c2d0-4344-a171-65a393991fff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.597715] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051b0938-9571-45f0-b7f9-1cb38836a14e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
905.611544] env[61972]: DEBUG nova.compute.provider_tree [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.646793] env[61972]: DEBUG nova.network.neutron [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Successfully created port: c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 905.715867] env[61972]: DEBUG oslo_vmware.api [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389365, 'name': PowerOffVM_Task, 'duration_secs': 0.192475} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.716369] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.716578] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.716831] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47422612-083e-4ce3-b304-e8c473205412 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.795528] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.795774] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.795931] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleting the datastore file [datastore1] a4e65047-a892-4f18-8a14-0f5de25ce235 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.796227] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba3e9a44-eadd-4e0a-a4e6-01cd73272576 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.803398] env[61972]: DEBUG oslo_vmware.api [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 
tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 905.803398] env[61972]: value = "task-1389368" [ 905.803398] env[61972]: _type = "Task" [ 905.803398] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.813082] env[61972]: DEBUG nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 905.816388] env[61972]: DEBUG oslo_vmware.api [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389368, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.848294] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389366, 'name': Rename_Task, 'duration_secs': 0.150087} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.848886] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.849257] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9f3db80-4a5c-47c8-9b92-be77e9ed25c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.856336] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 905.856336] env[61972]: value = "task-1389369" [ 905.856336] env[61972]: _type = "Task" [ 905.856336] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.865866] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389369, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.117020] env[61972]: DEBUG nova.scheduler.client.report [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 906.313712] env[61972]: DEBUG oslo_vmware.api [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162857} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.314135] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.314361] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.314545] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.314718] env[61972]: INFO nova.compute.manager [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Took 1.13 seconds to destroy the instance on the hypervisor. [ 906.314958] env[61972]: DEBUG oslo.service.loopingcall [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.315170] env[61972]: DEBUG nova.compute.manager [-] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 906.315244] env[61972]: DEBUG nova.network.neutron [-] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.367603] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389369, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.619798] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.622509] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.264s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.624096] env[61972]: INFO nova.compute.claims [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.661913] env[61972]: INFO nova.scheduler.client.report [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Deleted allocations for instance 489f1de0-d1c8-4429-a6f1-24ea885282f3 [ 906.823071] env[61972]: DEBUG nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 906.853822] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.854064] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.854228] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.854410] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.854556] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.854938] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.855018] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.855223] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.855449] env[61972]: DEBUG nova.virt.hardware [None 
req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.855730] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.859019] env[61972]: DEBUG nova.virt.hardware [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.859019] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b83bf3-4b11-48f3-8582-07d84c4df1c6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.873771] env[61972]: DEBUG oslo_vmware.api [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389369, 'name': PowerOnVM_Task, 'duration_secs': 0.686342} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.874146] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.874351] env[61972]: INFO nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Took 8.05 seconds to spawn the instance on the hypervisor. 
[ 906.874526] env[61972]: DEBUG nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 906.875918] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee0f461-0d1b-43ca-9fef-3d399ce896da {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.881625] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a56987f-e679-4589-a4c0-288489490072 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.950653] env[61972]: DEBUG nova.compute.manager [req-dfcc0564-2c37-4c6c-a80e-a45ae09fcd56 req-31e8d968-78df-4573-af57-f5c67b107b56 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Received event network-vif-deleted-c0baab24-8aa5-4d82-9f32-62579ec19c0b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 906.950890] env[61972]: INFO nova.compute.manager [req-dfcc0564-2c37-4c6c-a80e-a45ae09fcd56 req-31e8d968-78df-4573-af57-f5c67b107b56 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Neutron deleted interface c0baab24-8aa5-4d82-9f32-62579ec19c0b; detaching it from the instance and deleting it from the info cache [ 906.951285] env[61972]: DEBUG nova.network.neutron [req-dfcc0564-2c37-4c6c-a80e-a45ae09fcd56 req-31e8d968-78df-4573-af57-f5c67b107b56 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.174394] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3c0ee300-9b21-4ca0-b524-3e8c59a1efc7 tempest-ServerPasswordTestJSON-1668341779 tempest-ServerPasswordTestJSON-1668341779-project-member] Lock "489f1de0-d1c8-4429-a6f1-24ea885282f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.191s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.216038] env[61972]: DEBUG nova.network.neutron [-] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.412377] env[61972]: INFO nova.compute.manager [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Took 21.04 seconds to build instance. 
[ 907.455761] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6a71f6e-4092-4a7c-8fb9-d80f9874c868 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.466572] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3629f0a4-43e5-473d-b6e6-0fa80c8f1952 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.501701] env[61972]: DEBUG nova.compute.manager [req-dfcc0564-2c37-4c6c-a80e-a45ae09fcd56 req-31e8d968-78df-4573-af57-f5c67b107b56 service nova] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Detach interface failed, port_id=c0baab24-8aa5-4d82-9f32-62579ec19c0b, reason: Instance a4e65047-a892-4f18-8a14-0f5de25ce235 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 907.542873] env[61972]: DEBUG nova.network.neutron [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Successfully updated port: c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 907.643877] env[61972]: DEBUG nova.compute.manager [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 907.644171] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdbb643-847f-4f32-87a7-a36a43fd301e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.717184] env[61972]: INFO nova.compute.manager [-] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Took 1.40 seconds to deallocate network for instance. 
[ 907.916951] env[61972]: DEBUG oslo_concurrency.lockutils [None req-10c09a0e-2543-4ad0-ab13-eed09aef2202 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.549s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.917142] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0792c931-b4a1-491a-8576-f72203c23066 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.926913] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95377a8d-59b5-45a2-9a4c-6125e40b3a5d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.964910] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3327ea-6891-4667-b9b7-fc1dec0f44ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.973347] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc7ebe4-95d1-4bab-a267-887adf9b8eed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.990807] env[61972]: DEBUG nova.compute.provider_tree [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.047968] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.047968] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.047968] env[61972]: DEBUG nova.network.neutron [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 908.163593] env[61972]: INFO nova.compute.manager [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] instance snapshotting [ 908.166027] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a147f4-145e-473f-b813-0a15a4f7da3b {{(pid=61972) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.193395] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d47ed6-75de-4d93-a9f5-d961c3a699c5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.229031] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.494843] env[61972]: DEBUG nova.scheduler.client.report [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 908.599768] env[61972]: DEBUG nova.objects.instance [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lazy-loading 'flavor' on Instance uuid 21440243-458c-4640-b0ba-8f3b8b1b0720 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.616554] env[61972]: DEBUG nova.network.neutron [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.707489] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Creating Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 908.707821] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3afc4943-c801-49c3-ac75-fa27c1182457 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.716946] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 908.716946] env[61972]: value = "task-1389370" [ 908.716946] env[61972]: _type = "Task" [ 908.716946] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.726475] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389370, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.780632] env[61972]: DEBUG nova.network.neutron [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Updating instance_info_cache with network_info: [{"id": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "address": "fa:16:3e:a9:1d:15", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e9a7e3-a8", "ovs_interfaceid": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.995345] env[61972]: DEBUG nova.compute.manager [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Received event network-vif-plugged-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 908.996240] env[61972]: DEBUG oslo_concurrency.lockutils [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] Acquiring lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.996807] env[61972]: DEBUG oslo_concurrency.lockutils [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.997537] env[61972]: DEBUG oslo_concurrency.lockutils [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.998131] env[61972]: DEBUG nova.compute.manager [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] No waiting events found dispatching network-vif-plugged-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 908.998766] env[61972]: WARNING nova.compute.manager [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Received unexpected event network-vif-plugged-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb for instance with vm_state building and task_state spawning. [ 908.999037] env[61972]: DEBUG nova.compute.manager [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Received event network-changed-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 908.999228] env[61972]: DEBUG nova.compute.manager [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Refreshing instance network info cache due to event network-changed-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 908.999449] env[61972]: DEBUG oslo_concurrency.lockutils [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] Acquiring lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.000493] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.001211] env[61972]: DEBUG nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 909.003958] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.669s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.004291] env[61972]: DEBUG nova.objects.instance [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lazy-loading 'resources' on Instance uuid a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.106205] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.106447] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquired lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.231397] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389370, 'name': CreateSnapshot_Task, 'duration_secs': 0.468834} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.234504] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Created Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 909.235865] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079837c1-0dd8-4128-a07d-149029d0d3d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.289480] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.289480] env[61972]: DEBUG nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Instance network_info: |[{"id": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "address": "fa:16:3e:a9:1d:15", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e9a7e3-a8", "ovs_interfaceid": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 909.289629] env[61972]: DEBUG oslo_concurrency.lockutils [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] Acquired lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.289629] env[61972]: DEBUG nova.network.neutron [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Refreshing network info cache for port c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 909.294136] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] 
[instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:1d:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e99c063c-0cb7-4db6-b077-114166cfe889', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8e9a7e3-a835-49e7-a4a5-9b864104e5fb', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 909.310603] env[61972]: DEBUG oslo.service.loopingcall [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.314311] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 909.314803] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cac80c2f-cfe0-4478-846d-2475f7cef5aa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.344061] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 909.344061] env[61972]: value = "task-1389371" [ 909.344061] env[61972]: _type = "Task" [ 909.344061] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.357641] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389371, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.396527] env[61972]: DEBUG nova.compute.manager [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Stashing vm_state: active {{(pid=61972) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 909.508402] env[61972]: DEBUG nova.compute.utils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 909.514925] env[61972]: DEBUG nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 909.516255] env[61972]: DEBUG nova.network.neutron [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.588124] env[61972]: DEBUG nova.policy [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0f8f690f32941a58ee86cdcfb1107af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e19bbeb0f2504f7bbc0b88f978f9fd12', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 909.750502] env[61972]: DEBUG nova.network.neutron [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.758772] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Creating linked-clone VM from snapshot {{(pid=61972) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 909.761713] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a01add43-9f6d-41c1-b25f-945fe7c5c01b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.773457] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 909.773457] env[61972]: value = "task-1389372" [ 909.773457] env[61972]: _type = "Task" [ 909.773457] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.783038] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389372, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.816116] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6f32f5-8936-4156-9e5f-4cd801136752 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.823788] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87407420-dd44-41de-bbe0-fcef2bc04f88 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.860348] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579c1c9e-f9bc-4cfc-9624-d683eba0939b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.874382] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389371, 'name': CreateVM_Task, 'duration_secs': 0.342322} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.876429] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 909.877177] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.877664] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.878116] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 909.879570] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a076d7c1-0958-4786-a908-5f39a75f23bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.883451] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e7f6929-b69c-4c01-a305-4ed6b89c5c4f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.891268] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 909.891268] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]520e14c0-d30a-1b15-0038-4785cd68b8c5" [ 909.891268] env[61972]: _type = "Task" [ 909.891268] 
env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.902101] env[61972]: DEBUG nova.compute.provider_tree [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.916727] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]520e14c0-d30a-1b15-0038-4785cd68b8c5, 'name': SearchDatastore_Task, 'duration_secs': 0.009874} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.917566] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.918225] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 909.918225] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.918225] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.918407] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 909.919114] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76a9e676-72a8-4d60-a8c2-435ae7289952 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.924890] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.927811] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 909.928043] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 909.928787] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3bbe0e3-96bf-492e-ad43-86d103649c3f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.935073] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 909.935073] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5256c510-c068-d8c5-c772-fd07e4301d95" [ 909.935073] env[61972]: _type = "Task" [ 909.935073] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.942946] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5256c510-c068-d8c5-c772-fd07e4301d95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.966591] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.966826] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.015691] env[61972]: DEBUG nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 910.075818] env[61972]: DEBUG nova.network.neutron [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Successfully created port: 2b837067-f779-43d6-9b1c-302a175bf675 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.088662] env[61972]: DEBUG nova.network.neutron [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Updated VIF entry in instance network info cache for port c8e9a7e3-a835-49e7-a4a5-9b864104e5fb. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.089065] env[61972]: DEBUG nova.network.neutron [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Updating instance_info_cache with network_info: [{"id": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "address": "fa:16:3e:a9:1d:15", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e9a7e3-a8", "ovs_interfaceid": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.186595] env[61972]: DEBUG nova.network.neutron [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.282717] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389372, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.410578] env[61972]: DEBUG nova.scheduler.client.report [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 910.445130] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5256c510-c068-d8c5-c772-fd07e4301d95, 'name': SearchDatastore_Task, 'duration_secs': 0.009124} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.446035] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a55129b0-2ac6-40ae-b7bc-21a86bd7000d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.452092] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 910.452092] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5201ccea-db94-be02-6708-be9d64b37c96" [ 910.452092] env[61972]: _type = "Task" [ 910.452092] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.460524] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5201ccea-db94-be02-6708-be9d64b37c96, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.471413] env[61972]: DEBUG nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 910.592218] env[61972]: DEBUG oslo_concurrency.lockutils [req-384ef59f-f46a-4839-9d1d-f47ff82755b8 req-55e1bf37-ef82-47dd-b29d-e0b8bd4cacbf service nova] Releasing lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.688958] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Releasing lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.689254] env[61972]: DEBUG nova.compute.manager [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Inject network info {{(pid=61972) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7548}} [ 910.689538] env[61972]: DEBUG nova.compute.manager [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] network_info to inject: |[{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 910.694295] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Reconfiguring VM 
instance to set the machine id {{(pid=61972) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 910.694921] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb13c08e-7bd8-4a66-8df4-1ddd1806921a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.712126] env[61972]: DEBUG oslo_vmware.api [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 910.712126] env[61972]: value = "task-1389373" [ 910.712126] env[61972]: _type = "Task" [ 910.712126] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.720832] env[61972]: DEBUG oslo_vmware.api [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389373, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.783857] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389372, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.830481] env[61972]: DEBUG nova.objects.instance [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lazy-loading 'flavor' on Instance uuid 21440243-458c-4640-b0ba-8f3b8b1b0720 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 910.917270] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.913s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.920060] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.498s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.920317] env[61972]: DEBUG nova.objects.instance [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lazy-loading 'resources' on Instance uuid 1597e0f2-f67a-406e-9ef0-4d39b353ab0a {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 910.941757] env[61972]: INFO nova.scheduler.client.report [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted allocations for instance a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee [ 910.963949] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb 
tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5201ccea-db94-be02-6708-be9d64b37c96, 'name': SearchDatastore_Task, 'duration_secs': 0.009003} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.963949] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.963949] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] c274f675-f45e-49e7-8bf3-582a6977d95c/c274f675-f45e-49e7-8bf3-582a6977d95c.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 910.965030] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fd0d069-3ad3-4162-a679-435d05d5c9ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.970720] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 910.970720] env[61972]: value = "task-1389374" [ 910.970720] env[61972]: _type = "Task" [ 910.970720] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.980213] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389374, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.994143] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.021054] env[61972]: DEBUG nova.compute.manager [req-6a2a1e78-c8ca-416a-b4ab-336d242f50e2 req-9cd6006b-b986-4c2e-b05e-fb2d1afe85b7 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Received event network-changed-1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 911.021275] env[61972]: DEBUG nova.compute.manager [req-6a2a1e78-c8ca-416a-b4ab-336d242f50e2 req-9cd6006b-b986-4c2e-b05e-fb2d1afe85b7 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Refreshing instance network info cache due to event network-changed-1296b6ff-7e29-4bc6-8230-f6b7696702f8. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 911.021491] env[61972]: DEBUG oslo_concurrency.lockutils [req-6a2a1e78-c8ca-416a-b4ab-336d242f50e2 req-9cd6006b-b986-4c2e-b05e-fb2d1afe85b7 service nova] Acquiring lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.021639] env[61972]: DEBUG oslo_concurrency.lockutils [req-6a2a1e78-c8ca-416a-b4ab-336d242f50e2 req-9cd6006b-b986-4c2e-b05e-fb2d1afe85b7 service nova] Acquired lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.021801] env[61972]: DEBUG nova.network.neutron [req-6a2a1e78-c8ca-416a-b4ab-336d242f50e2 req-9cd6006b-b986-4c2e-b05e-fb2d1afe85b7 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Refreshing network info cache for port 1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.025468] env[61972]: DEBUG nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 911.047921] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 911.048636] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 911.048888] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.049246] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 911.049518] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.049757] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 911.050029] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 911.050282] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 911.050534] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 911.050859] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 911.051109] env[61972]: DEBUG nova.virt.hardware [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 911.052398] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8b045f-28a7-44d5-8c57-c47727fd9960 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.061572] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff667c0-24c4-4c53-8a55-a6f42a668aa5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.225107] env[61972]: DEBUG oslo_vmware.api [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389373, 'name': ReconfigVM_Task, 'duration_secs': 0.325769} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.225504] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffdc1fd-9eda-414a-b2a7-bfa3b2d3abb8 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Reconfigured VM instance to set the machine id {{(pid=61972) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 911.288662] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389372, 'name': CloneVM_Task, 'duration_secs': 1.444927} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.288986] env[61972]: INFO nova.virt.vmwareapi.vmops [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Created linked-clone VM from snapshot [ 911.289844] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02782ea-8e69-41cb-b476-b0e71795a7d2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.299049] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Uploading image aad3e858-b1b6-43b1-917a-f092e48268b2 {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 911.327070] env[61972]: DEBUG oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 911.327070] env[61972]: value = "vm-294881" [ 911.327070] env[61972]: _type = "VirtualMachine" [ 911.327070] env[61972]: }. 
{{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 911.327443] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-09de5172-ab86-4e44-a47c-8f768630afed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.334759] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.337021] env[61972]: DEBUG oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lease: (returnval){ [ 911.337021] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52330184-5a85-9881-7404-02ba9841dd75" [ 911.337021] env[61972]: _type = "HttpNfcLease" [ 911.337021] env[61972]: } obtained for exporting VM: (result){ [ 911.337021] env[61972]: value = "vm-294881" [ 911.337021] env[61972]: _type = "VirtualMachine" [ 911.337021] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 911.337368] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the lease: (returnval){ [ 911.337368] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52330184-5a85-9881-7404-02ba9841dd75" [ 911.337368] env[61972]: _type = "HttpNfcLease" [ 911.337368] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 911.347765] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 911.347765] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52330184-5a85-9881-7404-02ba9841dd75" [ 911.347765] env[61972]: _type = "HttpNfcLease" [ 911.347765] env[61972]: } is initializing. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 911.449892] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0344eaa7-b865-4e6f-bbb4-91842f12a147 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.637s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.481069] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389374, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455174} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.481448] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] c274f675-f45e-49e7-8bf3-582a6977d95c/c274f675-f45e-49e7-8bf3-582a6977d95c.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 911.481738] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 911.482148] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de25b8d1-0791-48a9-9121-23efc6741ba2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.492320] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 911.492320] env[61972]: value = "task-1389376" [ 911.492320] env[61972]: _type = "Task" [ 911.492320] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.501623] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389376, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.675268] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f65add3-726c-4f5d-8f4d-b02622b4f784 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.684416] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4ab48f-8130-4270-9732-fcdb0fcc913b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.729904] env[61972]: DEBUG nova.network.neutron [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Successfully updated port: 2b837067-f779-43d6-9b1c-302a175bf675 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.731619] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8b122a-5233-4631-8be5-e8358db9a0d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.742617] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207d07c3-3745-49a3-9c9d-d696b8e72591 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.759762] env[61972]: DEBUG nova.compute.provider_tree [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 911.850584] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 911.850584] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52330184-5a85-9881-7404-02ba9841dd75" [ 911.850584] env[61972]: _type = "HttpNfcLease" [ 911.850584] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 911.851021] env[61972]: DEBUG oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 911.851021] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52330184-5a85-9881-7404-02ba9841dd75" [ 911.851021] env[61972]: _type = "HttpNfcLease" [ 911.851021] env[61972]: }. 
{{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 911.851673] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c96cb2-3a82-48fd-9be2-d62949efe55e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.858941] env[61972]: DEBUG oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52233340-8e0e-1450-9b84-271e6168c1f9/disk-0.vmdk from lease info. {{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 911.859127] env[61972]: DEBUG oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52233340-8e0e-1450-9b84-271e6168c1f9/disk-0.vmdk for reading. {{(pid=61972) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 911.921428] env[61972]: DEBUG nova.network.neutron [req-6a2a1e78-c8ca-416a-b4ab-336d242f50e2 req-9cd6006b-b986-4c2e-b05e-fb2d1afe85b7 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updated VIF entry in instance network info cache for port 1296b6ff-7e29-4bc6-8230-f6b7696702f8. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 911.921837] env[61972]: DEBUG nova.network.neutron [req-6a2a1e78-c8ca-416a-b4ab-336d242f50e2 req-9cd6006b-b986-4c2e-b05e-fb2d1afe85b7 service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.958102] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d3ca2d5a-1f67-4055-a331-a19fca22850f {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.001145] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389376, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.159561} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.001429] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 912.002198] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f4544b-bdec-407c-9621-70a80e3b5d78 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.023739] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] c274f675-f45e-49e7-8bf3-582a6977d95c/c274f675-f45e-49e7-8bf3-582a6977d95c.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 912.024038] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e5e202a-16dd-4cd0-b189-d50e225aec04 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.043282] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 912.043282] env[61972]: value = "task-1389377" [ 912.043282] env[61972]: _type = "Task" [ 912.043282] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.051157] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389377, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.238451] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquiring lock "refresh_cache-12a1a1ee-9aa1-4dda-9276-68492718e404" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.238695] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquired lock "refresh_cache-12a1a1ee-9aa1-4dda-9276-68492718e404" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.239066] env[61972]: DEBUG nova.network.neutron [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.280342] env[61972]: ERROR nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [req-91ee4437-30a7-47d2-bb11-3b8749b0baac] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f34b92c-91e8-4983-ae34-7426fcec3157. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-91ee4437-30a7-47d2-bb11-3b8749b0baac"}]} [ 912.301493] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing inventories for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 912.325074] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating ProviderTree inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 912.325591] env[61972]: DEBUG nova.compute.provider_tree [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 912.338511] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing aggregate associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, aggregates: None {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 912.357497] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing trait associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 912.429180] env[61972]: DEBUG oslo_concurrency.lockutils [req-6a2a1e78-c8ca-416a-b4ab-336d242f50e2 req-9cd6006b-b986-4c2e-b05e-fb2d1afe85b7 service nova] Releasing lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.430329] env[61972]: DEBUG oslo_concurrency.lockutils [None 
req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.431083] env[61972]: DEBUG oslo_concurrency.lockutils [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.431083] env[61972]: DEBUG oslo_concurrency.lockutils [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.431083] env[61972]: DEBUG oslo_concurrency.lockutils [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.431283] env[61972]: DEBUG oslo_concurrency.lockutils [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.433229] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquired lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.435859] env[61972]: INFO nova.compute.manager [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Terminating instance [ 912.555808] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389377, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.593351] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f541c556-6a3a-4a26-b9d6-f9b5eeba4bed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.602044] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529d03eb-bf5b-4556-9ecf-3d7a6dd1a832 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.635947] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2db083-1ba9-4b50-9608-50322ef5a409 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.644211] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e39c405-3368-4523-808d-993dd7fd7f1e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.659919] env[61972]: DEBUG nova.compute.provider_tree [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 912.787822] env[61972]: DEBUG nova.network.neutron [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.855459] env[61972]: DEBUG nova.network.neutron [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.943192] env[61972]: DEBUG nova.compute.manager [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 912.943463] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.944623] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869dc4d8-867a-49d6-a538-6e70ce1db3e8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.955906] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.956396] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a389c9c-f7f5-4d96-8be9-06c149d19718 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.963806] env[61972]: DEBUG oslo_vmware.api [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 912.963806] env[61972]: value = "task-1389378" [ 912.963806] env[61972]: _type = "Task" [ 912.963806] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.965086] env[61972]: DEBUG nova.network.neutron [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Updating instance_info_cache with network_info: [{"id": "2b837067-f779-43d6-9b1c-302a175bf675", "address": "fa:16:3e:ab:fd:a7", "network": {"id": "0adaedcb-aaa1-4dcd-9487-6f6cec9224d9", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1392549695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e19bbeb0f2504f7bbc0b88f978f9fd12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b837067-f7", "ovs_interfaceid": "2b837067-f779-43d6-9b1c-302a175bf675", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.975746] env[61972]: DEBUG oslo_vmware.api [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': 
task-1389378, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.982091] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.982452] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.054950] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389377, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.064631] env[61972]: DEBUG nova.compute.manager [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Received event network-vif-plugged-2b837067-f779-43d6-9b1c-302a175bf675 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 913.065137] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Acquiring lock "12a1a1ee-9aa1-4dda-9276-68492718e404-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.065518] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Lock "12a1a1ee-9aa1-4dda-9276-68492718e404-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.065836] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Lock "12a1a1ee-9aa1-4dda-9276-68492718e404-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.066187] env[61972]: DEBUG nova.compute.manager [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] No waiting events found dispatching network-vif-plugged-2b837067-f779-43d6-9b1c-302a175bf675 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 913.066552] env[61972]: WARNING nova.compute.manager [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 
12a1a1ee-9aa1-4dda-9276-68492718e404] Received unexpected event network-vif-plugged-2b837067-f779-43d6-9b1c-302a175bf675 for instance with vm_state building and task_state spawning. [ 913.066904] env[61972]: DEBUG nova.compute.manager [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Received event network-changed-2b837067-f779-43d6-9b1c-302a175bf675 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 913.067264] env[61972]: DEBUG nova.compute.manager [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Refreshing instance network info cache due to event network-changed-2b837067-f779-43d6-9b1c-302a175bf675. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 913.067588] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Acquiring lock "refresh_cache-12a1a1ee-9aa1-4dda-9276-68492718e404" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.186068] env[61972]: ERROR nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [req-d2635c36-6295-45f0-89dc-b12c524edae6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f34b92c-91e8-4983-ae34-7426fcec3157. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d2635c36-6295-45f0-89dc-b12c524edae6"}]} [ 913.204082] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing inventories for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 913.223322] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating ProviderTree inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 913.223577] env[61972]: DEBUG nova.compute.provider_tree [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 913.235556] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing aggregate associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, aggregates: None {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 913.254346] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Refreshing trait associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 913.268522] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "a77d41aa-13ba-4d26-b5fd-4928891948ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.468630] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Releasing lock "refresh_cache-12a1a1ee-9aa1-4dda-9276-68492718e404" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.469094] env[61972]: DEBUG nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Instance network_info: |[{"id": "2b837067-f779-43d6-9b1c-302a175bf675", "address": "fa:16:3e:ab:fd:a7", "network": {"id": "0adaedcb-aaa1-4dcd-9487-6f6cec9224d9", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1392549695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e19bbeb0f2504f7bbc0b88f978f9fd12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b837067-f7", "ovs_interfaceid": "2b837067-f779-43d6-9b1c-302a175bf675", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 913.469609] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Acquired lock "refresh_cache-12a1a1ee-9aa1-4dda-9276-68492718e404" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.469879] env[61972]: DEBUG nova.network.neutron [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Refreshing network info cache for port 2b837067-f779-43d6-9b1c-302a175bf675 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.471948] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:fd:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba4f6497-e2b4-43b5-9819-6927865ae974', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b837067-f779-43d6-9b1c-302a175bf675', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.480092] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 
tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Creating folder: Project (e19bbeb0f2504f7bbc0b88f978f9fd12). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.489807] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50e83a1c-efe1-4e92-a447-d3ca97e76161 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.495015] env[61972]: DEBUG nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 913.501818] env[61972]: DEBUG oslo_vmware.api [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389378, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.505773] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Created folder: Project (e19bbeb0f2504f7bbc0b88f978f9fd12) in parent group-v294799. [ 913.506102] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Creating folder: Instances. Parent ref: group-v294882. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.507012] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98682ab4-4173-49a2-ba5f-21640787b747 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.511299] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94f0e01-ae6c-4f24-9d20-300b86b9041a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.516706] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Created folder: Instances in parent group-v294882. [ 913.517380] env[61972]: DEBUG oslo.service.loopingcall [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 913.517380] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.517655] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ed39b69-81a7-4e9f-87ec-63fc0d4cc01f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.542024] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120a90fb-ae33-444c-8e7d-7c42e83abd66 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.548388] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.548388] env[61972]: value = "task-1389381" [ 913.548388] env[61972]: _type = "Task" [ 913.548388] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.584991] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a5af64-9335-4d27-a8df-23b55f38d1e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.595618] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389381, 'name': CreateVM_Task} progress is 15%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.595935] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389377, 'name': ReconfigVM_Task, 'duration_secs': 1.384112} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.596777] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Reconfigured VM instance instance-00000055 to attach disk [datastore1] c274f675-f45e-49e7-8bf3-582a6977d95c/c274f675-f45e-49e7-8bf3-582a6977d95c.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.597522] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8fda944-d66d-4489-8e69-c0548adc56f9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.603535] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d2497f-4d4a-4822-b2b0-b3ef7781b095 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.612014] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 913.612014] env[61972]: value = "task-1389382" [ 913.612014] env[61972]: _type = "Task" [ 913.612014] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.624359] env[61972]: DEBUG nova.compute.provider_tree [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 913.634366] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389382, 'name': Rename_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.768059] env[61972]: DEBUG nova.network.neutron [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.821463] env[61972]: DEBUG nova.network.neutron [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Updated VIF entry in instance network info cache for port 2b837067-f779-43d6-9b1c-302a175bf675. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.821993] env[61972]: DEBUG nova.network.neutron [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Updating instance_info_cache with network_info: [{"id": "2b837067-f779-43d6-9b1c-302a175bf675", "address": "fa:16:3e:ab:fd:a7", "network": {"id": "0adaedcb-aaa1-4dcd-9487-6f6cec9224d9", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1392549695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e19bbeb0f2504f7bbc0b88f978f9fd12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b837067-f7", "ovs_interfaceid": "2b837067-f779-43d6-9b1c-302a175bf675", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.979317] env[61972]: DEBUG oslo_vmware.api [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389378, 'name': PowerOffVM_Task, 'duration_secs': 0.705077} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.979697] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.979921] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.980266] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8964f32a-efd0-4b0f-80ad-3e073ab80d40 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.016474] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.046244] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 914.046576] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 914.046812] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleting the datastore file [datastore2] 0cd09167-2c2f-4cad-b26d-35aa208fbf79 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 914.047179] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90d65315-a278-4021-bb6b-1742153f0178 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.054384] env[61972]: DEBUG oslo_vmware.api [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 914.054384] env[61972]: value = "task-1389384" [ 914.054384] env[61972]: _type = "Task" [ 914.054384] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.066732] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389381, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.069982] env[61972]: DEBUG oslo_vmware.api [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.124572] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389382, 'name': Rename_Task, 'duration_secs': 0.179219} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.124878] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.125208] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-186d0a59-4b87-4ded-bac4-bfabb847e243 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.135284] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 914.135284] env[61972]: value = "task-1389385" [ 914.135284] env[61972]: _type = "Task" [ 914.135284] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.144889] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389385, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.167781] env[61972]: DEBUG nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updated inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 914.168143] env[61972]: DEBUG nova.compute.provider_tree [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 generation from 100 to 101 during operation: update_inventory {{(pid=61972) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 914.168368] env[61972]: DEBUG nova.compute.provider_tree [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 914.271117] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Releasing lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.271462] env[61972]: DEBUG nova.compute.manager [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Inject network info {{(pid=61972) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7548}} [ 914.271774] env[61972]: DEBUG nova.compute.manager [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] network_info to inject: |[{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 914.277937] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Reconfiguring VM instance to set the machine id {{(pid=61972) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 914.278284] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1b1852b-aef3-4d43-b17e-5a546074fc1e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.293996] env[61972]: DEBUG oslo_vmware.api [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 914.293996] env[61972]: value = "task-1389386" [ 914.293996] env[61972]: _type = "Task" [ 914.293996] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.302615] env[61972]: DEBUG oslo_vmware.api [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389386, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.325371] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Releasing lock "refresh_cache-12a1a1ee-9aa1-4dda-9276-68492718e404" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.325371] env[61972]: DEBUG nova.compute.manager [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Received event network-changed-1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 914.325605] env[61972]: DEBUG nova.compute.manager [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Refreshing instance network info cache due to event network-changed-1296b6ff-7e29-4bc6-8230-f6b7696702f8. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 914.325848] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Acquiring lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.326031] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Acquired lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.326254] env[61972]: DEBUG nova.network.neutron [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Refreshing network info cache for port 1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 914.568356] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389381, 'name': CreateVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.571213] env[61972]: DEBUG oslo_vmware.api [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166418} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.571455] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.571638] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.571821] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.571998] env[61972]: INFO nova.compute.manager [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Took 1.63 seconds to destroy the instance on the hypervisor. [ 914.572273] env[61972]: DEBUG oslo.service.loopingcall [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.572468] env[61972]: DEBUG nova.compute.manager [-] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 914.572563] env[61972]: DEBUG nova.network.neutron [-] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.619590] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "21440243-458c-4640-b0ba-8f3b8b1b0720" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.620444] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "21440243-458c-4640-b0ba-8f3b8b1b0720" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.620444] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "21440243-458c-4640-b0ba-8f3b8b1b0720-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.620444] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "21440243-458c-4640-b0ba-8f3b8b1b0720-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.620444] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "21440243-458c-4640-b0ba-8f3b8b1b0720-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.623852] env[61972]: INFO nova.compute.manager [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Terminating instance [ 914.647559] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389385, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.674358] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.754s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.676960] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.448s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.677235] env[61972]: DEBUG nova.objects.instance [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lazy-loading 'resources' on Instance uuid a4e65047-a892-4f18-8a14-0f5de25ce235 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.700021] env[61972]: INFO nova.scheduler.client.report [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleted allocations for instance 1597e0f2-f67a-406e-9ef0-4d39b353ab0a [ 914.804242] env[61972]: DEBUG oslo_vmware.api [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389386, 'name': ReconfigVM_Task, 'duration_secs': 0.230597} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.804545] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a25c0cad-e6c4-4ebe-819b-b39a9676c1f3 tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Reconfigured VM instance to set the machine id {{(pid=61972) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 915.066576] env[61972]: DEBUG nova.network.neutron [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updated VIF entry in instance network info cache for port 1296b6ff-7e29-4bc6-8230-f6b7696702f8. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 915.066576] env[61972]: DEBUG nova.network.neutron [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [{"id": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "address": "fa:16:3e:72:ae:6b", "network": {"id": "8ff0d90d-3251-4958-af11-9c8f308617e9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-528709241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "300010fae08d4c8aa733d491ff9acbfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1296b6ff-7e", "ovs_interfaceid": "1296b6ff-7e29-4bc6-8230-f6b7696702f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.072698] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389381, 'name': CreateVM_Task, 'duration_secs': 1.523163} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.072996] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.074588] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.074815] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.075161] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 915.075431] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51a17c3c-7b23-4eef-84c2-1dd10699b276 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.080441] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 915.080441] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]529244eb-220e-b180-a1ea-daf1defccdb9" [ 915.080441] env[61972]: _type = "Task" [ 915.080441] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.090142] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529244eb-220e-b180-a1ea-daf1defccdb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.127427] env[61972]: DEBUG nova.compute.manager [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 915.127788] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.128573] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3c2259-73fb-4cbf-bace-1c3f45db6f1d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.137122] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 915.137408] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f80ccd6-4d02-47e5-81e7-2671fdf6a0fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.147337] env[61972]: DEBUG oslo_vmware.api [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389385, 'name': PowerOnVM_Task, 'duration_secs': 0.641217} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.148651] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.148912] env[61972]: INFO nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Took 8.33 seconds to spawn the instance on the hypervisor. [ 915.149161] env[61972]: DEBUG nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 915.149751] env[61972]: DEBUG oslo_vmware.api [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 915.149751] env[61972]: value = "task-1389387" [ 915.149751] env[61972]: _type = "Task" [ 915.149751] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.150716] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3db7ff3-c419-4f5c-9af7-a818c1204319 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.168889] env[61972]: DEBUG oslo_vmware.api [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389387, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.179815] env[61972]: DEBUG nova.compute.manager [req-b29a94b9-389e-4c82-b577-2019fbf3cf36 req-8478dd56-134c-4240-8121-1dbb9438e7b5 service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Received event network-vif-deleted-b4dc0e54-ff1f-458b-98f8-cfddec6ef15a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 915.179903] env[61972]: INFO nova.compute.manager [req-b29a94b9-389e-4c82-b577-2019fbf3cf36 req-8478dd56-134c-4240-8121-1dbb9438e7b5 service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Neutron deleted interface b4dc0e54-ff1f-458b-98f8-cfddec6ef15a; detaching it from the instance and deleting it from the info cache [ 915.180227] env[61972]: DEBUG nova.network.neutron [req-b29a94b9-389e-4c82-b577-2019fbf3cf36 req-8478dd56-134c-4240-8121-1dbb9438e7b5 service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.208820] env[61972]: DEBUG oslo_concurrency.lockutils [None req-55add82c-26da-4ef2-a606-f6109883e6d4 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "1597e0f2-f67a-406e-9ef0-4d39b353ab0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.329s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.330938] env[61972]: DEBUG nova.network.neutron [-] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.432434] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f705ca-383a-45be-a636-71f6c099ab41 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.440848] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7328302-5582-4847-b96b-cf6aa9d9c9fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.475854] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c682a1a2-62d7-4a91-9381-b2510b56fa84 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.484467] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46481155-ad97-4e93-8cb5-22c73b77b084 {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.499144] env[61972]: DEBUG nova.compute.provider_tree [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.569452] env[61972]: DEBUG oslo_concurrency.lockutils [req-152412f5-f608-4903-8701-dce9cb72821a req-d8a864b8-4abd-4670-bf2f-56d3c533019f service nova] Releasing lock "refresh_cache-21440243-458c-4640-b0ba-8f3b8b1b0720" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.591646] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]529244eb-220e-b180-a1ea-daf1defccdb9, 'name': SearchDatastore_Task, 'duration_secs': 0.008407} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.591928] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.592286] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.592429] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.592577] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.592764] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.593076] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-21451861-1f27-4b92-9c0c-8e135b9a21c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.601742] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.601970] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 915.603858] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ff6c304-65bd-4d71-89c6-5feaf4c49c2f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.609365] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 915.609365] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208bf90-6766-b2d8-6df0-6121811948e9" [ 915.609365] env[61972]: _type = "Task" [ 915.609365] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.617778] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208bf90-6766-b2d8-6df0-6121811948e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.664778] env[61972]: DEBUG oslo_vmware.api [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389387, 'name': PowerOffVM_Task, 'duration_secs': 0.36426} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.665091] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.665269] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.665522] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5fa2039-d4ac-44db-851c-15c57bd790ec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.675900] env[61972]: INFO nova.compute.manager [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Took 23.12 seconds to build instance. [ 915.685303] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bdb7b83-6ef2-4a32-b8b7-2b77ddab8fed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.695339] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f77c10-ff6d-48bf-a4ad-48214e9fde68 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.727086] env[61972]: DEBUG nova.compute.manager [req-b29a94b9-389e-4c82-b577-2019fbf3cf36 req-8478dd56-134c-4240-8121-1dbb9438e7b5 service nova] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Detach interface failed, port_id=b4dc0e54-ff1f-458b-98f8-cfddec6ef15a, reason: Instance 0cd09167-2c2f-4cad-b26d-35aa208fbf79 could not be found. 
{{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 915.730302] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.730427] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.730930] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Deleting the datastore file [datastore1] 21440243-458c-4640-b0ba-8f3b8b1b0720 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.731159] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afe523db-ae49-45c1-94da-4d7ec001015f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.737721] env[61972]: DEBUG oslo_vmware.api [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for the task: (returnval){ [ 915.737721] env[61972]: value = "task-1389389" [ 915.737721] env[61972]: _type = "Task" [ 915.737721] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.745970] env[61972]: DEBUG oslo_vmware.api [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389389, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.835652] env[61972]: INFO nova.compute.manager [-] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Took 1.26 seconds to deallocate network for instance. 
[ 916.003294] env[61972]: DEBUG nova.scheduler.client.report [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 916.119513] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208bf90-6766-b2d8-6df0-6121811948e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009626} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.120427] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fcd547d-d92d-424a-9e7a-b6fce9e3a231 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.125901] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 916.125901] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d1102f-46e3-b9e4-2f74-fdc432e46614" [ 916.125901] env[61972]: _type = "Task" [ 916.125901] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.133643] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d1102f-46e3-b9e4-2f74-fdc432e46614, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.178074] env[61972]: DEBUG oslo_concurrency.lockutils [None req-03cfdb08-4b14-402c-aeae-d2ef92d736fb tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.627s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.250965] env[61972]: DEBUG oslo_vmware.api [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Task: {'id': task-1389389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167596} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.251411] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.251608] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.251835] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.251965] env[61972]: INFO nova.compute.manager [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Took 1.12 seconds to destroy the instance on the hypervisor. [ 916.252237] env[61972]: DEBUG oslo.service.loopingcall [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.252432] env[61972]: DEBUG nova.compute.manager [-] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 916.252525] env[61972]: DEBUG nova.network.neutron [-] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.346037] env[61972]: DEBUG oslo_concurrency.lockutils [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.365849] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "c274f675-f45e-49e7-8bf3-582a6977d95c" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.366149] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.366337] env[61972]: INFO nova.compute.manager [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Shelving [ 916.509176] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.511707] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.587s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.541283] env[61972]: INFO nova.scheduler.client.report [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted allocations for instance a4e65047-a892-4f18-8a14-0f5de25ce235 [ 916.637660] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d1102f-46e3-b9e4-2f74-fdc432e46614, 'name': SearchDatastore_Task, 'duration_secs': 
0.009776} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.637954] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.638259] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 12a1a1ee-9aa1-4dda-9276-68492718e404/12a1a1ee-9aa1-4dda-9276-68492718e404.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 916.638537] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b70b29de-2041-4bd5-be9b-04755860f04a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.648042] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 916.648042] env[61972]: value = "task-1389390" [ 916.648042] env[61972]: _type = "Task" [ 916.648042] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.655102] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389390, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.740630] env[61972]: DEBUG nova.compute.manager [req-c938c1ba-4d72-4ed3-9b0a-7cb6358bc1bc req-43e733e1-1555-4767-8ce2-8fae9d5b1495 service nova] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Received event network-changed-a827c64c-a00d-4d2d-af6e-29c34ca4b899 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 916.740630] env[61972]: DEBUG nova.compute.manager [req-c938c1ba-4d72-4ed3-9b0a-7cb6358bc1bc req-43e733e1-1555-4767-8ce2-8fae9d5b1495 service nova] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Refreshing instance network info cache due to event network-changed-a827c64c-a00d-4d2d-af6e-29c34ca4b899. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 916.740630] env[61972]: DEBUG oslo_concurrency.lockutils [req-c938c1ba-4d72-4ed3-9b0a-7cb6358bc1bc req-43e733e1-1555-4767-8ce2-8fae9d5b1495 service nova] Acquiring lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.740919] env[61972]: DEBUG oslo_concurrency.lockutils [req-c938c1ba-4d72-4ed3-9b0a-7cb6358bc1bc req-43e733e1-1555-4767-8ce2-8fae9d5b1495 service nova] Acquired lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.740996] env[61972]: DEBUG nova.network.neutron [req-c938c1ba-4d72-4ed3-9b0a-7cb6358bc1bc req-43e733e1-1555-4767-8ce2-8fae9d5b1495 service nova] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Refreshing network info cache for port a827c64c-a00d-4d2d-af6e-29c34ca4b899 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.023413] env[61972]: INFO nova.compute.claims [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.049969] env[61972]: ERROR nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. 
[ 917.049969] env[61972]: ERROR nova.compute.manager Traceback (most recent call last): [ 917.049969] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 917.049969] env[61972]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 917.049969] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 917.049969] env[61972]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 917.049969] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 917.049969] env[61972]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 917.049969] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.049969] env[61972]: ERROR nova.compute.manager self.force_reraise() [ 917.049969] env[61972]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.049969] env[61972]: ERROR nova.compute.manager raise self.value [ 917.049969] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 917.049969] env[61972]: ERROR nova.compute.manager updated_port = self._update_port( [ 917.049969] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.049969] env[61972]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 917.050613] env[61972]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.050613] env[61972]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 917.050613] env[61972]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. 
[ 917.050613] env[61972]: ERROR nova.compute.manager [ 917.050613] env[61972]: Traceback (most recent call last): [ 917.050613] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 917.050613] env[61972]: listener.cb(fileno) [ 917.050613] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 917.050613] env[61972]: result = function(*args, **kwargs) [ 917.050613] env[61972]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 917.050613] env[61972]: return func(*args, **kwargs) [ 917.050613] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 917.050613] env[61972]: raise e [ 917.050613] env[61972]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 917.050613] env[61972]: nwinfo = self.network_api.allocate_for_instance( [ 917.050613] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 917.050613] env[61972]: created_port_ids = self._update_ports_for_instance( [ 917.050613] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 917.050613] env[61972]: with excutils.save_and_reraise_exception(): [ 917.050613] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.050613] env[61972]: self.force_reraise() [ 917.050613] env[61972]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.050613] env[61972]: raise self.value [ 917.050613] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 917.050613] env[61972]: updated_port = self._update_port( [ 917.050613] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.050613] env[61972]: _ensure_no_port_binding_failure(port) [ 917.050613] env[61972]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.050613] env[61972]: raise exception.PortBindingFailed(port_id=port['id']) [ 917.051718] env[61972]: nova.exception.PortBindingFailed: Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. [ 917.051718] env[61972]: Removing descriptor: 21 [ 917.051718] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f7e2439-db5c-4b72-a89b-2c86538d2ce8 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "a4e65047-a892-4f18-8a14-0f5de25ce235" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.374s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.055803] env[61972]: ERROR nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. 
[ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Traceback (most recent call last): [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] yield resources [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self.driver.spawn(context, instance, image_meta, [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] vm_ref = self.build_virtual_machine(instance, [ 917.055803] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] vif_infos = vmwarevif.get_vif_info(self._session, [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] for vif in network_info: [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] return self._sync_wrapper(fn, *args, **kwargs) [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self.wait() [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self[:] = self._gt.wait() [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] return self._exit_event.wait() [ 917.056383] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 917.056383] env[61972]: ERROR 
nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] result = hub.switch() [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] return self.greenlet.switch() [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] result = function(*args, **kwargs) [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] return func(*args, **kwargs) [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] raise e [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] nwinfo = self.network_api.allocate_for_instance( [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] created_port_ids = self._update_ports_for_instance( [ 917.056756] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] with excutils.save_and_reraise_exception(): [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self.force_reraise() [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] raise self.value [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] updated_port = self._update_port( [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.057145] 
env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] _ensure_no_port_binding_failure(port) [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] raise exception.PortBindingFailed(port_id=port['id']) [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] nova.exception.PortBindingFailed: Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. [ 917.057145] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] [ 917.057531] env[61972]: INFO nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Terminating instance [ 917.156850] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491883} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.157217] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 12a1a1ee-9aa1-4dda-9276-68492718e404/12a1a1ee-9aa1-4dda-9276-68492718e404.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.157514] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.157664] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-100490e2-5380-449c-b3df-db32dc68dfc2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.166283] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 917.166283] env[61972]: value = "task-1389391" [ 917.166283] env[61972]: _type = "Task" [ 917.166283] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.172747] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389391, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.208264] env[61972]: DEBUG nova.compute.manager [req-06456d88-04ac-427d-b8e3-f4345b66e1f7 req-4f849270-ffb8-45fb-95fe-e8dd1aa3131a service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Received event network-vif-deleted-1296b6ff-7e29-4bc6-8230-f6b7696702f8 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 917.208438] env[61972]: INFO nova.compute.manager [req-06456d88-04ac-427d-b8e3-f4345b66e1f7 req-4f849270-ffb8-45fb-95fe-e8dd1aa3131a service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Neutron deleted interface 1296b6ff-7e29-4bc6-8230-f6b7696702f8; detaching it from the instance and deleting it from the info cache [ 917.208655] env[61972]: DEBUG nova.network.neutron [req-06456d88-04ac-427d-b8e3-f4345b66e1f7 req-4f849270-ffb8-45fb-95fe-e8dd1aa3131a service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.265030] env[61972]: DEBUG nova.network.neutron [req-c938c1ba-4d72-4ed3-9b0a-7cb6358bc1bc req-43e733e1-1555-4767-8ce2-8fae9d5b1495 service nova] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.379371] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.379843] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3346b42-ef5f-4fd2-8e76-e82532fb2def {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.387658] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 917.387658] env[61972]: value = "task-1389392" [ 917.387658] env[61972]: _type = "Task" [ 917.387658] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.392163] env[61972]: DEBUG nova.network.neutron [req-c938c1ba-4d72-4ed3-9b0a-7cb6358bc1bc req-43e733e1-1555-4767-8ce2-8fae9d5b1495 service nova] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.400310] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389392, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.460521] env[61972]: DEBUG nova.network.neutron [-] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.531270] env[61972]: INFO nova.compute.resource_tracker [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating resource usage from migration 82a52ded-77e8-48e1-ae67-66fa8c324ccb [ 917.562500] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.674386] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389391, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060333} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.677287] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.678437] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd19f62-aa02-49e6-af96-d95afeb043fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.702262] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 12a1a1ee-9aa1-4dda-9276-68492718e404/12a1a1ee-9aa1-4dda-9276-68492718e404.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.705141] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-997a49c2-b735-43c2-8e8e-19bbd65d6b01 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.720877] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6dae7d4-3a1b-4849-8968-26ea5b5ff38d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.728993] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 917.728993] env[61972]: value = "task-1389393" [ 917.728993] env[61972]: 
_type = "Task" [ 917.728993] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.736835] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d23c18-70a3-4a86-938b-f0c3a969cd61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.758535] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389393, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.768570] env[61972]: DEBUG nova.compute.manager [req-06456d88-04ac-427d-b8e3-f4345b66e1f7 req-4f849270-ffb8-45fb-95fe-e8dd1aa3131a service nova] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Detach interface failed, port_id=1296b6ff-7e29-4bc6-8230-f6b7696702f8, reason: Instance 21440243-458c-4640-b0ba-8f3b8b1b0720 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 917.794305] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e610310-af4a-48d7-97ed-a1036daf38eb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.802344] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d246cff-fb32-42eb-adcd-f03dc281b689 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.833592] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e88e689-3444-4733-b523-c7f4bf933fcb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.841375] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c32fb6b-2ba3-4f73-b782-864c7c54d16e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.855117] env[61972]: DEBUG nova.compute.provider_tree [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 917.898199] env[61972]: DEBUG oslo_concurrency.lockutils [req-c938c1ba-4d72-4ed3-9b0a-7cb6358bc1bc req-43e733e1-1555-4767-8ce2-8fae9d5b1495 service nova] Releasing lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.899128] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 
tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389392, 'name': PowerOffVM_Task, 'duration_secs': 0.35061} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.899402] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.899627] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.900818] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.902133] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c694aad-a3e5-4a7b-85b3-86108e6e521a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.923073] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196b3153-0f89-4a4a-a929-0b38a185ca97 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.966288] env[61972]: INFO nova.compute.manager [-] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Took 1.71 seconds to deallocate network for instance. [ 918.239502] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389393, 'name': ReconfigVM_Task, 'duration_secs': 0.280577} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.239782] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 12a1a1ee-9aa1-4dda-9276-68492718e404/12a1a1ee-9aa1-4dda-9276-68492718e404.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.240474] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0a31f42-c11c-4bea-9cf3-36f93a21e4fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.248211] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 918.248211] env[61972]: value = "task-1389394" [ 918.248211] env[61972]: _type = "Task" [ 918.248211] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.256134] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389394, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.375226] env[61972]: ERROR nova.scheduler.client.report [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [req-277e99a9-da87-48f0-a250-72ec2a3cd1dd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f34b92c-91e8-4983-ae34-7426fcec3157. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-277e99a9-da87-48f0-a250-72ec2a3cd1dd"}]} [ 918.391482] env[61972]: DEBUG nova.scheduler.client.report [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Refreshing inventories for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 918.412405] env[61972]: DEBUG nova.scheduler.client.report [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Updating ProviderTree inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 918.412668] env[61972]: DEBUG nova.compute.provider_tree [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 918.426757] env[61972]: DEBUG nova.scheduler.client.report [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Refreshing aggregate associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, aggregates: None {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 918.433090] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.435627] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Creating Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 918.435934] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-11175a54-5dba-47c0-820b-085f355afac8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.446413] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 918.446413] env[61972]: value = "task-1389395" [ 918.446413] env[61972]: _type = "Task" [ 918.446413] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.454864] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389395, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.456082] env[61972]: DEBUG nova.scheduler.client.report [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Refreshing trait associations for resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61972) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 918.474571] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.482268] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquiring lock "b9726bf4-a4b1-4b22-840f-98157d0d790c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.482586] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "b9726bf4-a4b1-4b22-840f-98157d0d790c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.482714] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa 
tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquiring lock "b9726bf4-a4b1-4b22-840f-98157d0d790c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.482925] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "b9726bf4-a4b1-4b22-840f-98157d0d790c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.483827] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "b9726bf4-a4b1-4b22-840f-98157d0d790c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.487132] env[61972]: INFO nova.compute.manager [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Terminating instance [ 918.625894] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.716436] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "65c02563-a348-4415-bb21-3d3711202838" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.716698] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65c02563-a348-4415-bb21-3d3711202838" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.720267] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dec6185-55b2-477f-998b-a8ff2a008ae8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.729089] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b21930f-ed3d-43ef-99cb-80d6de726a25 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.762929] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4665472b-5baf-4a1a-93f6-29741ec857b5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.770746] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389394, 'name': Rename_Task, 'duration_secs': 0.145221} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.773289] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.773289] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20282a46-650f-491d-9878-de16666f3b23 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.775587] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9364d5e-814d-441d-bca5-f5e34ad89a31 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.791122] env[61972]: DEBUG nova.compute.provider_tree [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 918.794327] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 918.794327] env[61972]: value = "task-1389396" [ 918.794327] env[61972]: _type = "Task" [ 918.794327] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.795320] env[61972]: DEBUG nova.compute.manager [req-48ee97ba-981b-4c2e-9d8b-bcc4815818e6 req-6623e369-d2d5-4e0f-9928-98d5b1788b42 service nova] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Received event network-vif-deleted-a827c64c-a00d-4d2d-af6e-29c34ca4b899 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 918.805107] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389396, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.957892] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389395, 'name': CreateSnapshot_Task, 'duration_secs': 0.509777} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.958288] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Created Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 918.959395] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b539d198-d69b-41db-9d05-2605eb6de2be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.994348] env[61972]: DEBUG nova.compute.manager [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 918.995346] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 918.995746] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1ea125-6b86-4a91-85f7-332450aed5f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.005034] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 919.006108] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa046c2e-4fe7-4d11-a0de-7f4f7f289521 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.013095] env[61972]: DEBUG oslo_vmware.api [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 919.013095] env[61972]: value = "task-1389397" [ 919.013095] env[61972]: _type = "Task" [ 919.013095] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.021720] env[61972]: DEBUG oslo_vmware.api [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389397, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.128801] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.129391] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 919.129646] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.130454] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ed06479-03f8-41a3-9bc6-b0fa479d572d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.140700] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f60323-540e-449f-a29a-fabe4d6a63d2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.174424] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a77d41aa-13ba-4d26-b5fd-4928891948ce could not be found. [ 919.174696] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.174905] env[61972]: INFO nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Took 0.05 seconds to destroy the instance on the hypervisor. [ 919.175362] env[61972]: DEBUG oslo.service.loopingcall [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.176824] env[61972]: DEBUG nova.compute.manager [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 919.177009] env[61972]: DEBUG nova.network.neutron [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.204279] env[61972]: DEBUG nova.network.neutron [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.219849] env[61972]: DEBUG nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 919.310215] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389396, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.336744] env[61972]: DEBUG nova.scheduler.client.report [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Updated inventory for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 919.337054] env[61972]: DEBUG nova.compute.provider_tree [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Updating resource provider 2f34b92c-91e8-4983-ae34-7426fcec3157 generation from 102 to 103 during operation: update_inventory {{(pid=61972) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 919.337246] env[61972]: DEBUG nova.compute.provider_tree [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Updating inventory in ProviderTree for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 919.480579] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None 
req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Creating linked-clone VM from snapshot {{(pid=61972) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 919.480959] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-63e9528b-0b19-4326-9a70-fc6febefe1b8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.490611] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 919.490611] env[61972]: value = "task-1389398" [ 919.490611] env[61972]: _type = "Task" [ 919.490611] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.500955] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389398, 'name': CloneVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.522250] env[61972]: DEBUG oslo_vmware.api [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389397, 'name': PowerOffVM_Task, 'duration_secs': 0.268823} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.522530] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.522701] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.523049] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e9ed091-0c06-40e6-8295-011a8a5a7470 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.589285] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 919.589558] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 919.589760] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Deleting the datastore file [datastore2] b9726bf4-a4b1-4b22-840f-98157d0d790c {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.590072] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8782f3ba-27e8-4de6-a068-905a77a060ed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.597453] env[61972]: DEBUG oslo_vmware.api [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for the task: (returnval){ [ 919.597453] env[61972]: value = "task-1389400" [ 919.597453] env[61972]: _type = "Task" [ 919.597453] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.606193] env[61972]: DEBUG oslo_vmware.api [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389400, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.708478] env[61972]: DEBUG nova.network.neutron [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.746636] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.813022] env[61972]: DEBUG oslo_vmware.api [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389396, 'name': PowerOnVM_Task, 'duration_secs': 0.570945} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.813022] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.813022] env[61972]: INFO nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Took 8.79 seconds to spawn the instance on the hypervisor. 
[ 919.813022] env[61972]: DEBUG nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 919.813022] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8a9f92-369a-425c-9223-2682bacc944a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.843543] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.332s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.843786] env[61972]: INFO nova.compute.manager [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Migrating [ 919.844063] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.844220] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.845510] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.852s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.847918] env[61972]: INFO nova.compute.claims [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 919.852403] env[61972]: INFO nova.compute.rpcapi [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 919.853285] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.003279] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 
tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389398, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.107659] env[61972]: DEBUG oslo_vmware.api [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Task: {'id': task-1389400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.440709} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.107972] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.108290] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 920.108545] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.108748] env[61972]: INFO nova.compute.manager [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 920.109065] env[61972]: DEBUG oslo.service.loopingcall [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.109341] env[61972]: DEBUG nova.compute.manager [-] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 920.109444] env[61972]: DEBUG nova.network.neutron [-] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 920.211203] env[61972]: INFO nova.compute.manager [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Took 1.03 seconds to deallocate network for instance. 
[ 920.213863] env[61972]: DEBUG nova.compute.claims [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Aborting claim: {{(pid=61972) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 920.214129] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.331159] env[61972]: INFO nova.compute.manager [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Took 18.99 seconds to build instance. [ 920.369355] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.369549] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.369733] env[61972]: DEBUG nova.network.neutron [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.505921] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389398, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.835047] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5ba502b4-2f0a-40d5-b721-fecf85b766b6 tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "12a1a1ee-9aa1-4dda-9276-68492718e404" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.514s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.007407] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389398, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.102798] env[61972]: DEBUG nova.network.neutron [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance_info_cache with network_info: [{"id": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "address": "fa:16:3e:c8:26:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd99d79a-cd", "ovs_interfaceid": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.105982] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6b8760-8fb0-4a65-84ad-b0330bf0f229 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.114341] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad83ab9-451c-40c5-9f27-4df7ef857c39 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.150785] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c1cd1e-fa86-42c0-b059-2fdb4ce8c1da {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.160780] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c353796-1570-4d97-80e0-e764027259f9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.177372] env[61972]: DEBUG nova.compute.provider_tree [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.373652] env[61972]: DEBUG oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52233340-8e0e-1450-9b84-271e6168c1f9/disk-0.vmdk. 
{{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 921.374607] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513ace0c-529c-4e2b-8b25-209bd92ef7c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.386217] env[61972]: DEBUG oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52233340-8e0e-1450-9b84-271e6168c1f9/disk-0.vmdk is in state: ready. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 921.386453] env[61972]: ERROR oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52233340-8e0e-1450-9b84-271e6168c1f9/disk-0.vmdk due to incomplete transfer. [ 921.387510] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-37ff71b2-16be-468b-9d4a-d226d0d8f1ec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.395117] env[61972]: DEBUG nova.compute.manager [req-1d491314-4acd-43ac-b749-677cf5052e10 req-443a3e6a-e062-45b3-bb35-7bf84519da4f service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Received event network-vif-deleted-6348fdb6-1e04-4d45-b3d2-e67eb05449f7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 921.395345] env[61972]: INFO nova.compute.manager [req-1d491314-4acd-43ac-b749-677cf5052e10 req-443a3e6a-e062-45b3-bb35-7bf84519da4f service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Neutron deleted interface 6348fdb6-1e04-4d45-b3d2-e67eb05449f7; detaching it from the instance and deleting it from the info cache [ 921.396138] env[61972]: DEBUG nova.network.neutron [req-1d491314-4acd-43ac-b749-677cf5052e10 req-443a3e6a-e062-45b3-bb35-7bf84519da4f service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.401382] env[61972]: DEBUG oslo_vmware.rw_handles [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52233340-8e0e-1450-9b84-271e6168c1f9/disk-0.vmdk. 
{{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 921.401382] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Uploaded image aad3e858-b1b6-43b1-917a-f092e48268b2 to the Glance image server {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 921.402214] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Destroying the VM {{(pid=61972) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 921.402719] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cbf8227d-1281-4fd7-9c7b-95580eb43217 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.413107] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 921.413107] env[61972]: value = "task-1389401" [ 921.413107] env[61972]: _type = "Task" [ 921.413107] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.426901] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389401, 'name': Destroy_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.505751] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389398, 'name': CloneVM_Task, 'duration_secs': 1.802729} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.506043] env[61972]: INFO nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Created linked-clone VM from snapshot [ 921.506821] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c252bbb-2176-4876-884d-7dc8c523392f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.514144] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Uploading image dfa315fa-5bb1-45eb-9033-5c97cf525ca6 {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 921.538890] env[61972]: DEBUG oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 921.538890] env[61972]: value = "vm-294886" [ 921.538890] env[61972]: _type = "VirtualMachine" [ 921.538890] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 921.539269] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8b17edc3-a502-4138-8e76-72b226401b8e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.546136] env[61972]: DEBUG oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lease: (returnval){ [ 921.546136] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5277abd2-8458-d20a-7ad8-cafd2931f9cb" [ 921.546136] env[61972]: _type = "HttpNfcLease" [ 921.546136] env[61972]: } obtained for exporting VM: (result){ [ 921.546136] env[61972]: value = "vm-294886" [ 921.546136] env[61972]: _type = "VirtualMachine" [ 921.546136] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 921.546448] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the lease: (returnval){ [ 921.546448] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5277abd2-8458-d20a-7ad8-cafd2931f9cb" [ 921.546448] env[61972]: _type = "HttpNfcLease" [ 921.546448] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 921.555140] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 921.555140] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5277abd2-8458-d20a-7ad8-cafd2931f9cb" [ 921.555140] env[61972]: _type = "HttpNfcLease" [ 921.555140] env[61972]: } is initializing. 
{{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 921.609986] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.654926] env[61972]: DEBUG nova.network.neutron [-] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.681251] env[61972]: DEBUG nova.scheduler.client.report [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 921.899627] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0eb9ca6-a25c-48bc-bfbd-ce64433c104b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.908918] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0757c6-acad-4c49-9074-85e2a17ae0e8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.927110] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389401, 'name': Destroy_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.941227] env[61972]: DEBUG nova.compute.manager [req-1d491314-4acd-43ac-b749-677cf5052e10 req-443a3e6a-e062-45b3-bb35-7bf84519da4f service nova] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Detach interface failed, port_id=6348fdb6-1e04-4d45-b3d2-e67eb05449f7, reason: Instance b9726bf4-a4b1-4b22-840f-98157d0d790c could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 922.055050] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 922.055050] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5277abd2-8458-d20a-7ad8-cafd2931f9cb" [ 922.055050] env[61972]: _type = "HttpNfcLease" [ 922.055050] env[61972]: } is ready. 
{{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 922.055431] env[61972]: DEBUG oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 922.055431] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5277abd2-8458-d20a-7ad8-cafd2931f9cb" [ 922.055431] env[61972]: _type = "HttpNfcLease" [ 922.055431] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 922.056189] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f070086-1870-4bbd-87ee-ba5abf3104cc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.063747] env[61972]: DEBUG oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52210a94-19a7-8645-98f1-168389070d4e/disk-0.vmdk from lease info. {{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 922.063979] env[61972]: DEBUG oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52210a94-19a7-8645-98f1-168389070d4e/disk-0.vmdk for reading. {{(pid=61972) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 922.162752] env[61972]: INFO nova.compute.manager [-] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Took 2.05 seconds to deallocate network for instance. [ 922.168155] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-10e95b35-eb0e-43c9-9834-6d27c577e634 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.186201] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.186644] env[61972]: DEBUG nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 922.189525] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.173s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.191682] env[61972]: INFO nova.compute.claims [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.369771] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquiring lock "12a1a1ee-9aa1-4dda-9276-68492718e404" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.369931] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "12a1a1ee-9aa1-4dda-9276-68492718e404" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.370172] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquiring lock "12a1a1ee-9aa1-4dda-9276-68492718e404-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.370358] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "12a1a1ee-9aa1-4dda-9276-68492718e404-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.370525] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "12a1a1ee-9aa1-4dda-9276-68492718e404-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.375587] env[61972]: INFO nova.compute.manager [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Terminating instance [ 922.429759] env[61972]: DEBUG oslo_vmware.api 
[None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389401, 'name': Destroy_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.672725] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.696970] env[61972]: DEBUG nova.compute.utils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 922.698369] env[61972]: DEBUG nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 922.699020] env[61972]: DEBUG nova.network.neutron [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.764326] env[61972]: DEBUG nova.policy [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3be17eae7273428782fef3d4aa7b7cce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c822f4d4b5a4575ba334521b1b9fbde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 922.881504] env[61972]: DEBUG nova.compute.manager [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 922.881504] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 922.882738] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252a477e-abef-44c7-b8e1-b862d1007133 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.893261] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.893592] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66e72c7a-e006-4f77-909b-8c9a6162d2ca {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.899915] env[61972]: DEBUG oslo_vmware.api [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 922.899915] env[61972]: value = "task-1389403" [ 922.899915] env[61972]: _type = "Task" [ 922.899915] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.908527] env[61972]: DEBUG oslo_vmware.api [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389403, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.929584] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389401, 'name': Destroy_Task, 'duration_secs': 1.298687} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.929859] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Destroyed the VM [ 922.930280] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Deleting Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 922.930540] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6137d867-6dc4-4afa-9d7e-e5798ae17661 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.936528] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 922.936528] env[61972]: value = "task-1389404" [ 922.936528] env[61972]: _type = "Task" [ 922.936528] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.945157] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389404, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.145521] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17dd25f5-63d3-4463-9669-6ff133e2f2fb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.180450] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance '84e07f61-2111-43cb-93a2-9cb47ac52503' progress to 0 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 923.202340] env[61972]: DEBUG nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 923.326425] env[61972]: DEBUG nova.network.neutron [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Successfully created port: 98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.411817] env[61972]: DEBUG oslo_vmware.api [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389403, 'name': PowerOffVM_Task, 'duration_secs': 0.320551} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.412236] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 923.412322] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.412585] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66dbe591-2a97-4791-bd15-e3b409ff4284 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.449761] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389404, 'name': RemoveSnapshot_Task} progress is 16%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.540484] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64659027-9831-415e-8541-675e6bf0fbb0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.549508] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d9ccaa-81e0-40f7-8e72-e0a8ddc7e5b6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.584590] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1648d388-7574-4272-b0e2-033da128d437 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.593575] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5bc4e1-0a47-4bb2-8506-f5349a2801d8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.608039] env[61972]: DEBUG nova.compute.provider_tree [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.691160] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.691495] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97a5c830-e0cd-49ce-8528-dca222a73759 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.700616] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 923.700616] env[61972]: value = "task-1389406" [ 923.700616] env[61972]: _type = "Task" [ 923.700616] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.716782] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389406, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.949831] env[61972]: DEBUG oslo_vmware.api [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389404, 'name': RemoveSnapshot_Task, 'duration_secs': 0.693166} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.950182] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Deleted Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 923.950451] env[61972]: INFO nova.compute.manager [None req-65141c4d-f06b-44a3-9108-cbe6181c7f69 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Took 15.79 seconds to snapshot the instance on the hypervisor. [ 924.112411] env[61972]: DEBUG nova.scheduler.client.report [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 924.210912] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389406, 'name': PowerOffVM_Task, 'duration_secs': 0.210643} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.213094] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.213094] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance '84e07f61-2111-43cb-93a2-9cb47ac52503' progress to 17 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 924.218068] env[61972]: DEBUG nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 924.247285] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 924.247548] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 924.247708] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.247895] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 924.248093] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.248261] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 924.248491] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 924.248628] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 924.248791] env[61972]: DEBUG nova.virt.hardware [None 
req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 924.248949] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 924.249445] env[61972]: DEBUG nova.virt.hardware [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 924.250357] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d78e50-2069-4db0-818c-c31fbb0906cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.261955] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7102fe9a-abb5-4766-973e-89b624478cac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.618203] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.428s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.618784] env[61972]: DEBUG nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 924.621483] env[61972]: DEBUG oslo_concurrency.lockutils [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.276s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.621855] env[61972]: DEBUG nova.objects.instance [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lazy-loading 'resources' on Instance uuid 0cd09167-2c2f-4cad-b26d-35aa208fbf79 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.718587] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 924.718865] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 924.719038] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.719259] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 924.719376] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.719526] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 924.719731] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 924.719890] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 924.720077] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 924.720251] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 924.720533] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 924.726092] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9507c4a7-cc1a-49c7-bf3a-bb8c83d5e1ff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.741136] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 924.741389] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 924.741581] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Deleting the datastore file [datastore1] 12a1a1ee-9aa1-4dda-9276-68492718e404 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.741847] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c9823d8-68fc-40e3-9dc1-5053af1fc2db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.744900] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 924.744900] env[61972]: value = 
"task-1389407" [ 924.744900] env[61972]: _type = "Task" [ 924.744900] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.749591] env[61972]: DEBUG oslo_vmware.api [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for the task: (returnval){ [ 924.749591] env[61972]: value = "task-1389408" [ 924.749591] env[61972]: _type = "Task" [ 924.749591] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.755685] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389407, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.760922] env[61972]: DEBUG oslo_vmware.api [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389408, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.124432] env[61972]: DEBUG nova.compute.utils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 925.134434] env[61972]: DEBUG nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 925.134434] env[61972]: DEBUG nova.network.neutron [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 925.176145] env[61972]: DEBUG nova.policy [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6b7c5b037a54c8cbd151ad0f1875f37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbbaa322b60942819cfb147b5201daf4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 925.203521] env[61972]: DEBUG nova.compute.manager [req-b531d84a-65b5-429f-ba0c-d828c04b1550 req-08bb59bd-02ea-4f5e-bcec-37b6e0df80f8 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Received event network-vif-plugged-98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 925.203521] env[61972]: DEBUG oslo_concurrency.lockutils [req-b531d84a-65b5-429f-ba0c-d828c04b1550 req-08bb59bd-02ea-4f5e-bcec-37b6e0df80f8 service nova] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.203521] env[61972]: DEBUG oslo_concurrency.lockutils [req-b531d84a-65b5-429f-ba0c-d828c04b1550 req-08bb59bd-02ea-4f5e-bcec-37b6e0df80f8 service nova] Lock "d2864436-05a3-421f-98fd-41df925727c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.203521] env[61972]: DEBUG oslo_concurrency.lockutils [req-b531d84a-65b5-429f-ba0c-d828c04b1550 req-08bb59bd-02ea-4f5e-bcec-37b6e0df80f8 service nova] Lock "d2864436-05a3-421f-98fd-41df925727c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.203521] env[61972]: DEBUG nova.compute.manager [req-b531d84a-65b5-429f-ba0c-d828c04b1550 req-08bb59bd-02ea-4f5e-bcec-37b6e0df80f8 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] No waiting events found dispatching network-vif-plugged-98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 925.203965] env[61972]: WARNING nova.compute.manager [req-b531d84a-65b5-429f-ba0c-d828c04b1550 req-08bb59bd-02ea-4f5e-bcec-37b6e0df80f8 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Received unexpected event network-vif-plugged-98807bc5-c5af-4bd9-ad5e-8c3043878d76 for instance with vm_state building and task_state spawning. 
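The task handling in the entries above follows one recurring pattern: the vmwareapi driver invokes a vCenter method (ReconfigVM_Task, DeleteDatastoreFile_Task, PowerOffVM_Task), receives a task handle such as task-1389407, and then polls it until it reports success, logging intermediate progress ("progress is 6%.") and the final duration_secs. The loop below is a minimal sketch of that polling pattern, assuming a caller-supplied get_task_info callable with state/progress/error attributes; it is an illustration only, not oslo.vmware's actual wait_for_task implementation.

import time


def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it succeeds, errors, or times out.

    get_task_info is a hypothetical callable returning an object with
    .state ('running' | 'success' | 'error'), .progress (int percent) and
    .error (message or None). On success the elapsed time is returned,
    mirroring the duration_secs value in the log lines above.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == 'success':
            return time.monotonic() - start
        if info.state == 'error':
            raise RuntimeError('task failed: %s' % info.error)
        if time.monotonic() - start > timeout:
            raise TimeoutError('task did not complete within %ss' % timeout)
        # Corresponds to the periodic "_poll_task ... progress is N%." entries.
        time.sleep(interval)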
[ 925.262823] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389407, 'name': ReconfigVM_Task, 'duration_secs': 0.214763} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.264744] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance '84e07f61-2111-43cb-93a2-9cb47ac52503' progress to 33 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 925.274878] env[61972]: DEBUG oslo_vmware.api [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Task: {'id': task-1389408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139613} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.278913] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.279509] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.279857] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.280350] env[61972]: INFO nova.compute.manager [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Took 2.40 seconds to destroy the instance on the hypervisor. [ 925.280790] env[61972]: DEBUG oslo.service.loopingcall [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.281326] env[61972]: DEBUG nova.compute.manager [-] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 925.281539] env[61972]: DEBUG nova.network.neutron [-] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.421221] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ffbd98-7e14-4829-b4cc-d38d23cf5586 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.430031] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f2dc71-c124-415b-b8b0-0726d5224cba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.461534] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b3cc79-65ff-4b32-ab56-83ea498c99de {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.469608] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabf8bc5-f387-4aa6-b483-e10183bdb77a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.484127] env[61972]: DEBUG nova.compute.provider_tree [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.621969] env[61972]: DEBUG nova.network.neutron [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Successfully created port: 36f8ea5c-3a0e-465b-86ad-a380d3b8f573 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.636012] env[61972]: DEBUG nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 925.745796] env[61972]: DEBUG nova.compute.manager [req-d3abee4f-0622-4651-ac1a-6f882e781660 req-ff7f77e8-f3ee-45cd-85da-d811c79ea95e service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Received event network-vif-deleted-2b837067-f779-43d6-9b1c-302a175bf675 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 925.745903] env[61972]: INFO nova.compute.manager [req-d3abee4f-0622-4651-ac1a-6f882e781660 req-ff7f77e8-f3ee-45cd-85da-d811c79ea95e service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Neutron deleted interface 2b837067-f779-43d6-9b1c-302a175bf675; detaching it from the instance and deleting it from the info cache [ 925.746226] env[61972]: DEBUG nova.network.neutron [req-d3abee4f-0622-4651-ac1a-6f882e781660 req-ff7f77e8-f3ee-45cd-85da-d811c79ea95e service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.773841] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 925.773841] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 925.774078] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 925.774290] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 925.774441] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 925.774768] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 925.774814] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 925.774962] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 925.775355] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 925.775568] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 925.775750] env[61972]: DEBUG nova.virt.hardware [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 925.783570] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 925.785025] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47f08b08-6b02-4b0d-8d68-9633df97eb30 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.802983] env[61972]: DEBUG nova.network.neutron [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Successfully updated port: 98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 925.810644] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 925.810644] env[61972]: value = "task-1389409" [ 925.810644] env[61972]: _type = "Task" [ 925.810644] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.821146] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389409, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.988133] env[61972]: DEBUG nova.scheduler.client.report [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 926.023848] env[61972]: DEBUG nova.compute.manager [req-619337e8-08f4-4810-a7fa-f7d7422e42a2 req-8ef4d618-931b-4991-899c-089bdedf6df5 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Received event network-changed-98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 926.024030] env[61972]: DEBUG nova.compute.manager [req-619337e8-08f4-4810-a7fa-f7d7422e42a2 req-8ef4d618-931b-4991-899c-089bdedf6df5 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Refreshing instance network info cache due to event network-changed-98807bc5-c5af-4bd9-ad5e-8c3043878d76. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 926.024217] env[61972]: DEBUG oslo_concurrency.lockutils [req-619337e8-08f4-4810-a7fa-f7d7422e42a2 req-8ef4d618-931b-4991-899c-089bdedf6df5 service nova] Acquiring lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.024374] env[61972]: DEBUG oslo_concurrency.lockutils [req-619337e8-08f4-4810-a7fa-f7d7422e42a2 req-8ef4d618-931b-4991-899c-089bdedf6df5 service nova] Acquired lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.024541] env[61972]: DEBUG nova.network.neutron [req-619337e8-08f4-4810-a7fa-f7d7422e42a2 req-8ef4d618-931b-4991-899c-089bdedf6df5 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Refreshing network info cache for port 98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.194739] env[61972]: DEBUG nova.network.neutron [-] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.250086] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9aa26b1-a87a-419b-9516-8c0dd6dd21f6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.259373] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599641e2-c2c5-41e4-8c75-3c52ee71daeb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.290719] env[61972]: DEBUG nova.compute.manager [req-d3abee4f-0622-4651-ac1a-6f882e781660 req-ff7f77e8-f3ee-45cd-85da-d811c79ea95e service nova] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Detach interface failed, port_id=2b837067-f779-43d6-9b1c-302a175bf675, reason: Instance 12a1a1ee-9aa1-4dda-9276-68492718e404 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 926.305798] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.321938] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389409, 'name': ReconfigVM_Task, 'duration_secs': 0.24443} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.322367] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 926.323654] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c61183e-9992-4417-9a64-9016e578dfe9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.346956] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 84e07f61-2111-43cb-93a2-9cb47ac52503/84e07f61-2111-43cb-93a2-9cb47ac52503.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.347342] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-292b2351-b78b-40f8-87b5-a48e8c7f64b7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.366991] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 926.366991] env[61972]: value = "task-1389410" [ 926.366991] env[61972]: _type = "Task" [ 926.366991] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.375991] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389410, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.494292] env[61972]: DEBUG oslo_concurrency.lockutils [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.873s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.497263] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.023s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.498331] env[61972]: DEBUG nova.objects.instance [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lazy-loading 'resources' on Instance uuid 21440243-458c-4640-b0ba-8f3b8b1b0720 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.522022] env[61972]: INFO nova.scheduler.client.report [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted allocations for instance 0cd09167-2c2f-4cad-b26d-35aa208fbf79 [ 926.558534] env[61972]: DEBUG nova.network.neutron [req-619337e8-08f4-4810-a7fa-f7d7422e42a2 req-8ef4d618-931b-4991-899c-089bdedf6df5 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 926.647844] env[61972]: DEBUG nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 926.674416] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.674690] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.674878] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.675100] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.675259] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.675428] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.675666] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.675835] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.676063] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] 
Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.676271] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.676479] env[61972]: DEBUG nova.virt.hardware [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.677421] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ed741c-12c9-4383-93ea-76aa10efc171 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.681269] env[61972]: DEBUG nova.network.neutron [req-619337e8-08f4-4810-a7fa-f7d7422e42a2 req-8ef4d618-931b-4991-899c-089bdedf6df5 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.686321] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e6354c-cc1b-4a07-a505-14afb473d5ef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.703065] env[61972]: INFO nova.compute.manager [-] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Took 1.42 seconds to deallocate network for instance. 
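The nova.virt.hardware sequence repeated for each spawn and resize above (flavor and image limits 0:0:0, preferred topology 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", exactly one possible topology) amounts to enumerating sockets*cores*threads combinations that multiply out to the flavor's vCPU count while staying under the 65536 maximums. The helper below is a condensed, hypothetical re-creation of that enumeration; the function name and default limits are assumptions, and it is not Nova's actual _get_possible_cpu_topologies code.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    """Yield every sockets*cores*threads combination equal to vcpus within the limits."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                yield VirtCPUTopology(sockets, cores, threads)


# For the 1-vCPU m1.nano/m1.micro flavors above this yields a single topology:
print(list(possible_cpu_topologies(1)))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]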
[ 926.793317] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "94bd64b9-3d20-4631-baed-4500f9beb9c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.793317] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "94bd64b9-3d20-4631-baed-4500f9beb9c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.793317] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "94bd64b9-3d20-4631-baed-4500f9beb9c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.793317] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "94bd64b9-3d20-4631-baed-4500f9beb9c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.793667] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "94bd64b9-3d20-4631-baed-4500f9beb9c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.795647] env[61972]: INFO nova.compute.manager [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Terminating instance [ 926.878165] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389410, 'name': ReconfigVM_Task, 'duration_secs': 0.271422} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.878570] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 84e07f61-2111-43cb-93a2-9cb47ac52503/84e07f61-2111-43cb-93a2-9cb47ac52503.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.878863] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance '84e07f61-2111-43cb-93a2-9cb47ac52503' progress to 50 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 927.032702] env[61972]: DEBUG oslo_concurrency.lockutils [None req-547d1991-feb7-4d58-9d87-d5034b1e5bea tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "0cd09167-2c2f-4cad-b26d-35aa208fbf79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.602s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.166435] env[61972]: DEBUG nova.network.neutron [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Successfully updated port: 36f8ea5c-3a0e-465b-86ad-a380d3b8f573 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 927.186878] env[61972]: DEBUG oslo_concurrency.lockutils [req-619337e8-08f4-4810-a7fa-f7d7422e42a2 req-8ef4d618-931b-4991-899c-089bdedf6df5 service nova] Releasing lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.187333] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.187538] env[61972]: DEBUG nova.network.neutron [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.211093] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.224615] env[61972]: DEBUG nova.network.neutron [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 
tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.248841] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807ce1bc-642a-4b0e-8809-8ef31c27be38 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.256691] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c75b878-e875-485c-b913-25d9b02cb78b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.290075] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812c1717-d30c-457c-97a7-d58d7d382d41 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.298195] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67248713-ce3b-4cb2-b439-4cc08e81adfa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.304735] env[61972]: DEBUG nova.compute.manager [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 927.304968] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.305769] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27bd9ad5-7885-437d-b098-ac937abb1d3b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.318277] env[61972]: DEBUG nova.compute.provider_tree [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.323381] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.325021] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-781fce2e-9882-42f2-b234-bdb13b3e5e5f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.335439] env[61972]: DEBUG oslo_vmware.api [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for 
the task: (returnval){ [ 927.335439] env[61972]: value = "task-1389411" [ 927.335439] env[61972]: _type = "Task" [ 927.335439] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.341655] env[61972]: DEBUG oslo_vmware.api [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389411, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.386207] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9042809e-8ac1-4b66-89fd-dc62d84fd58c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.406622] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56236ea8-b254-4da0-8c10-d40f3a8da418 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.425976] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance '84e07f61-2111-43cb-93a2-9cb47ac52503' progress to 67 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 927.430359] env[61972]: DEBUG nova.network.neutron [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating instance_info_cache with network_info: [{"id": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "address": "fa:16:3e:5b:7e:e2", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98807bc5-c5", "ovs_interfaceid": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.669715] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "refresh_cache-b03b1fe7-2eda-4505-a6f9-19c570b15d1e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.671374] env[61972]: DEBUG 
oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "refresh_cache-b03b1fe7-2eda-4505-a6f9-19c570b15d1e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.671374] env[61972]: DEBUG nova.network.neutron [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.821814] env[61972]: DEBUG nova.compute.manager [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Received event network-vif-plugged-36f8ea5c-3a0e-465b-86ad-a380d3b8f573 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 927.821814] env[61972]: DEBUG oslo_concurrency.lockutils [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] Acquiring lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.821894] env[61972]: DEBUG oslo_concurrency.lockutils [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] Lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.822074] env[61972]: DEBUG oslo_concurrency.lockutils [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] Lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.822323] env[61972]: DEBUG nova.compute.manager [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] No waiting events found dispatching network-vif-plugged-36f8ea5c-3a0e-465b-86ad-a380d3b8f573 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 927.822535] env[61972]: WARNING nova.compute.manager [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Received unexpected event network-vif-plugged-36f8ea5c-3a0e-465b-86ad-a380d3b8f573 for instance with vm_state building and task_state spawning. 
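The external-event exchanges above (network-vif-plugged-36f8ea5c-3a0e-465b-86ad-a380d3b8f573 received, the per-instance "-events" lock acquired and released, "No waiting events found dispatching", then a WARNING about an unexpected event while the instance is still building) reflect a simple registry pattern: a waiter registers an expected event, an incoming Neutron notification pops the matching waiter, and a notification with no registered waiter is logged as unexpected. The class below is a minimal sketch of that pattern under those assumptions; the names are illustrative and this is not Nova's actual InstanceEvents implementation.

import threading


class InstanceEventRegistry:
    """Illustrative per-instance event registry with pop semantics."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        # Register interest before starting the operation that triggers the event.
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        # Called when an external notification arrives; None means nobody was waiting.
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


registry = InstanceEventRegistry()
waiter = registry.pop_instance_event(
    'b03b1fe7-2eda-4505-a6f9-19c570b15d1e',
    'network-vif-plugged-36f8ea5c-3a0e-465b-86ad-a380d3b8f573')
if waiter is None:
    # Matches the WARNING above: the event arrived before anyone registered for it.
    print('Received unexpected event')
else:
    waiter.set()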
[ 927.822715] env[61972]: DEBUG nova.compute.manager [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Received event network-changed-36f8ea5c-3a0e-465b-86ad-a380d3b8f573 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 927.822927] env[61972]: DEBUG nova.compute.manager [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Refreshing instance network info cache due to event network-changed-36f8ea5c-3a0e-465b-86ad-a380d3b8f573. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 927.823139] env[61972]: DEBUG oslo_concurrency.lockutils [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] Acquiring lock "refresh_cache-b03b1fe7-2eda-4505-a6f9-19c570b15d1e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.823882] env[61972]: DEBUG nova.scheduler.client.report [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 927.843806] env[61972]: DEBUG oslo_vmware.api [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389411, 'name': PowerOffVM_Task, 'duration_secs': 0.177037} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.844719] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.844914] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.845238] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8da5f10c-b62f-4cff-830a-c92ec438a3c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.912303] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 927.912653] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 927.912914] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Deleting the datastore file [datastore2] 94bd64b9-3d20-4631-baed-4500f9beb9c2 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.913241] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b203fde5-80ef-421b-ad39-f4c06aa1e617 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.919446] env[61972]: DEBUG oslo_vmware.api [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 927.919446] env[61972]: value = "task-1389413" [ 927.919446] env[61972]: _type = "Task" [ 927.919446] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.929664] env[61972]: DEBUG oslo_vmware.api [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389413, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.933503] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Releasing lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.933656] env[61972]: DEBUG nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Instance network_info: |[{"id": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "address": "fa:16:3e:5b:7e:e2", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98807bc5-c5", "ovs_interfaceid": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 927.937221] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:7e:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98807bc5-c5af-4bd9-ad5e-8c3043878d76', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.944602] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Creating folder: Project (1c822f4d4b5a4575ba334521b1b9fbde). Parent ref: group-v294799. 
{{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 927.944902] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-358f5cda-f18d-4b26-97cc-afc8e79d4bf4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.955732] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Created folder: Project (1c822f4d4b5a4575ba334521b1b9fbde) in parent group-v294799. [ 927.956161] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Creating folder: Instances. Parent ref: group-v294887. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 927.956451] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2721f265-ba89-4178-a149-8ee7dee9ca27 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.967419] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Created folder: Instances in parent group-v294887. [ 927.967673] env[61972]: DEBUG oslo.service.loopingcall [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.967871] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2864436-05a3-421f-98fd-41df925727c6] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 927.968100] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5707ce7-7c76-40a0-9885-fb60df579c09 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.982961] env[61972]: DEBUG nova.network.neutron [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Port dd99d79a-cd6c-477b-88f4-45e9d019f331 binding to destination host cpu-1 is already ACTIVE {{(pid=61972) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 927.989058] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.989058] env[61972]: value = "task-1389416" [ 927.989058] env[61972]: _type = "Task" [ 927.989058] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.997550] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389416, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.206034] env[61972]: DEBUG nova.network.neutron [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.329542] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.333021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.585s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.333408] env[61972]: INFO nova.compute.claims [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.358139] env[61972]: INFO nova.scheduler.client.report [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Deleted allocations for instance 21440243-458c-4640-b0ba-8f3b8b1b0720 [ 928.366753] env[61972]: DEBUG nova.network.neutron [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Updating instance_info_cache with network_info: [{"id": "36f8ea5c-3a0e-465b-86ad-a380d3b8f573", "address": "fa:16:3e:b2:42:00", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36f8ea5c-3a", "ovs_interfaceid": "36f8ea5c-3a0e-465b-86ad-a380d3b8f573", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.430301] env[61972]: DEBUG oslo_vmware.api [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389413, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143298} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.430586] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.430798] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 928.430993] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.431188] env[61972]: INFO nova.compute.manager [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 928.431445] env[61972]: DEBUG oslo.service.loopingcall [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.431648] env[61972]: DEBUG nova.compute.manager [-] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 928.431744] env[61972]: DEBUG nova.network.neutron [-] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.498904] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389416, 'name': CreateVM_Task, 'duration_secs': 0.337236} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.499113] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2864436-05a3-421f-98fd-41df925727c6] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.499790] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.499966] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.500590] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 928.500900] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b458ffb0-6f95-4da2-8e8c-e9113cb901a3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.506560] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 928.506560] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f89870-a204-a9cf-4324-369ba9bd8e4c" [ 928.506560] env[61972]: _type = "Task" [ 928.506560] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.516110] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f89870-a204-a9cf-4324-369ba9bd8e4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.806394] env[61972]: DEBUG oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52210a94-19a7-8645-98f1-168389070d4e/disk-0.vmdk. 
{{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 928.807346] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6582128-28cc-4aaa-afa7-500db29e95ed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.814791] env[61972]: DEBUG oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52210a94-19a7-8645-98f1-168389070d4e/disk-0.vmdk is in state: ready. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 928.814957] env[61972]: ERROR oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52210a94-19a7-8645-98f1-168389070d4e/disk-0.vmdk due to incomplete transfer. [ 928.815335] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ea576172-c8f9-4be6-a1c5-6281f8855eff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.822833] env[61972]: DEBUG oslo_vmware.rw_handles [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52210a94-19a7-8645-98f1-168389070d4e/disk-0.vmdk. {{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 928.823122] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Uploaded image dfa315fa-5bb1-45eb-9033-5c97cf525ca6 to the Glance image server {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 928.826846] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Destroying the VM {{(pid=61972) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 928.826939] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-244694c0-4808-4a9b-91b9-4de6f56364ea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.834812] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 928.834812] env[61972]: value = "task-1389417" [ 928.834812] env[61972]: _type = "Task" [ 928.834812] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.846806] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389417, 'name': Destroy_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.865831] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a72381f9-dc94-4f57-849c-df1c02ddddfe tempest-AttachInterfacesUnderV243Test-1740974403 tempest-AttachInterfacesUnderV243Test-1740974403-project-member] Lock "21440243-458c-4640-b0ba-8f3b8b1b0720" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.246s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.868736] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "refresh_cache-b03b1fe7-2eda-4505-a6f9-19c570b15d1e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.869052] env[61972]: DEBUG nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Instance network_info: |[{"id": "36f8ea5c-3a0e-465b-86ad-a380d3b8f573", "address": "fa:16:3e:b2:42:00", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36f8ea5c-3a", "ovs_interfaceid": "36f8ea5c-3a0e-465b-86ad-a380d3b8f573", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 928.869363] env[61972]: DEBUG oslo_concurrency.lockutils [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] Acquired lock "refresh_cache-b03b1fe7-2eda-4505-a6f9-19c570b15d1e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.869548] env[61972]: DEBUG nova.network.neutron [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Refreshing network info cache for port 36f8ea5c-3a0e-465b-86ad-a380d3b8f573 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.873019] env[61972]: DEBUG 
nova.virt.vmwareapi.vmops [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:42:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '838c9497-35dd-415e-96c7-8dc21b0cd4b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36f8ea5c-3a0e-465b-86ad-a380d3b8f573', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.877991] env[61972]: DEBUG oslo.service.loopingcall [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.879394] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 928.879629] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94682d0e-7a9e-4797-a27c-da1cef94365b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.899789] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.899789] env[61972]: value = "task-1389418" [ 928.899789] env[61972]: _type = "Task" [ 928.899789] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.909246] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389418, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.008806] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.009066] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.009249] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.020216] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f89870-a204-a9cf-4324-369ba9bd8e4c, 'name': SearchDatastore_Task, 'duration_secs': 0.015237} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.021417] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.021689] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.021928] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.022123] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.022455] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.022872] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bf07923-5d4a-44cf-b07f-ffae88d34ad6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.031674] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.031863] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.032700] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62a3f86-e205-4d53-88dd-fb28ec68994a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.038474] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 929.038474] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f5bbb3-8cdb-cfbe-8c57-5faa76eca5c3" [ 929.038474] env[61972]: _type = "Task" [ 929.038474] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.046739] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f5bbb3-8cdb-cfbe-8c57-5faa76eca5c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.217347] env[61972]: DEBUG nova.network.neutron [-] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.348622] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389417, 'name': Destroy_Task, 'duration_secs': 0.463066} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.348622] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Destroyed the VM [ 929.348622] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Deleting Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 929.348622] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5678d7ab-8c8a-4714-b488-b6ca552986a6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.354876] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 929.354876] env[61972]: value = "task-1389419" [ 929.354876] env[61972]: _type = "Task" [ 929.354876] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.364399] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389419, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.410567] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389418, 'name': CreateVM_Task, 'duration_secs': 0.417936} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.410832] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 929.411437] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.411602] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.411916] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 929.412257] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a65a6993-1c7d-4200-a7a0-d33b8dd8832c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.416658] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 929.416658] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]523d905f-0af7-0092-8561-e90fb75c16ae" [ 929.416658] env[61972]: _type = "Task" [ 929.416658] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.425195] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523d905f-0af7-0092-8561-e90fb75c16ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.550533] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f5bbb3-8cdb-cfbe-8c57-5faa76eca5c3, 'name': SearchDatastore_Task, 'duration_secs': 0.008997} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.556434] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-608684d9-94c8-4fd3-8540-beba3970b35b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.562166] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 929.562166] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]523e8a85-70b0-7387-51b7-1e81460c7bea" [ 929.562166] env[61972]: _type = "Task" [ 929.562166] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.572123] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523e8a85-70b0-7387-51b7-1e81460c7bea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.600955] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6cccab-af22-4603-9696-b9b6a3ceffa7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.608192] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a07954-40f1-415e-bc87-f14116aee6e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.639217] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a7e1b5-fed2-42bc-a66a-cb1dc5f13574 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.648759] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fc712b-26d2-4f0d-9357-76e95d4075fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.663645] env[61972]: DEBUG nova.compute.provider_tree [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.681378] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" by 
"nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.681648] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.681828] env[61972]: INFO nova.compute.manager [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Shelving [ 929.702949] env[61972]: DEBUG nova.network.neutron [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Updated VIF entry in instance network info cache for port 36f8ea5c-3a0e-465b-86ad-a380d3b8f573. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.703370] env[61972]: DEBUG nova.network.neutron [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Updating instance_info_cache with network_info: [{"id": "36f8ea5c-3a0e-465b-86ad-a380d3b8f573", "address": "fa:16:3e:b2:42:00", "network": {"id": "6a45631a-6ab1-4436-9912-52af55ba08fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1562639470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbbaa322b60942819cfb147b5201daf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36f8ea5c-3a", "ovs_interfaceid": "36f8ea5c-3a0e-465b-86ad-a380d3b8f573", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.720113] env[61972]: INFO nova.compute.manager [-] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Took 1.29 seconds to deallocate network for instance. [ 929.867323] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389419, 'name': RemoveSnapshot_Task, 'duration_secs': 0.380245} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.867648] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Deleted Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 929.867853] env[61972]: DEBUG nova.compute.manager [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 929.868648] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45156b7e-2e69-4ce2-98ae-570566015ddb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.926443] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523d905f-0af7-0092-8561-e90fb75c16ae, 'name': SearchDatastore_Task, 'duration_secs': 0.0097} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.927362] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.927362] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.927362] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.998867] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.998867] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s 
{{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.998867] env[61972]: INFO nova.compute.manager [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Shelving [ 930.002827] env[61972]: DEBUG nova.compute.manager [req-4fa7d34b-1e6f-4d23-9d89-1560d09b6b60 req-db02df29-9c2e-489f-8a5d-430cc89d21eb service nova] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Received event network-vif-deleted-c8a279aa-ff18-41b6-9384-5364aea002e9 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 930.060633] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.060831] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.061023] env[61972]: DEBUG nova.network.neutron [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.072138] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523e8a85-70b0-7387-51b7-1e81460c7bea, 'name': SearchDatastore_Task, 'duration_secs': 0.009482} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.072390] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.072633] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] d2864436-05a3-421f-98fd-41df925727c6/d2864436-05a3-421f-98fd-41df925727c6.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.072947] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.073166] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.073387] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-246970b9-7676-4886-883d-8a28cfdd1e47 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.075485] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-007ed264-5c22-4308-bdcb-4ca678074d00 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.081206] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 930.081206] env[61972]: value = "task-1389420" [ 930.081206] env[61972]: _type = "Task" [ 930.081206] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.085959] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.086161] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.089326] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ef75c3-a5fa-4277-8eab-2313c89785ec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.091337] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389420, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.094205] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 930.094205] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cc033a-ea64-e2f8-1008-d07d71b08e9b" [ 930.094205] env[61972]: _type = "Task" [ 930.094205] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.101036] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cc033a-ea64-e2f8-1008-d07d71b08e9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.167474] env[61972]: DEBUG nova.scheduler.client.report [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 930.205520] env[61972]: DEBUG oslo_concurrency.lockutils [req-08ccb43d-a24a-4391-8958-a3f9c5cf8831 req-0b6178f1-1490-4550-bef4-0f7918a296ff service nova] Releasing lock "refresh_cache-b03b1fe7-2eda-4505-a6f9-19c570b15d1e" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.226091] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.380350] env[61972]: INFO nova.compute.manager [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Shelve offloading [ 930.592045] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 
tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389420, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.606268] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cc033a-ea64-e2f8-1008-d07d71b08e9b, 'name': SearchDatastore_Task, 'duration_secs': 0.00857} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.607165] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3baf413-38ce-476e-9d12-eb88576cf66a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.612630] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 930.612630] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5212a727-850f-1067-97e4-2f42bcfcc3b3" [ 930.612630] env[61972]: _type = "Task" [ 930.612630] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.620586] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5212a727-850f-1067-97e4-2f42bcfcc3b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.672960] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.673672] env[61972]: DEBUG nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 930.678882] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.465s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.694024] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.694024] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff2ffcf8-8266-4b95-be98-468473169d76 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.698043] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 930.698043] env[61972]: value = "task-1389421" [ 930.698043] env[61972]: _type = "Task" [ 930.698043] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.715022] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389421, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.801643] env[61972]: DEBUG nova.network.neutron [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance_info_cache with network_info: [{"id": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "address": "fa:16:3e:c8:26:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd99d79a-cd", "ovs_interfaceid": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.884401] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.884730] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-678e784d-8c17-4e1a-9e37-328f9ae67606 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.891951] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 930.891951] env[61972]: value = "task-1389422" [ 930.891951] env[61972]: _type = "Task" [ 930.891951] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.901489] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] VM already powered off {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 930.901722] env[61972]: DEBUG nova.compute.manager [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 930.902496] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b42916-3eaa-403d-8e27-68773b0ec0fb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.908200] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.908372] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.908546] env[61972]: DEBUG nova.network.neutron [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 931.009421] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.009754] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6f0a932-df5e-48e6-9a04-c75dcdfc9a79 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.017746] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 931.017746] env[61972]: value = "task-1389423" [ 931.017746] env[61972]: _type = "Task" [ 931.017746] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.026980] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.091929] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389420, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552417} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.092380] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] d2864436-05a3-421f-98fd-41df925727c6/d2864436-05a3-421f-98fd-41df925727c6.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.092481] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.092716] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba006141-88e8-4e10-bcb0-fc4422cdeaf0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.099834] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 931.099834] env[61972]: value = "task-1389424" [ 931.099834] env[61972]: _type = "Task" [ 931.099834] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.108018] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.123536] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5212a727-850f-1067-97e4-2f42bcfcc3b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009943} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.123817] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.124145] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] b03b1fe7-2eda-4505-a6f9-19c570b15d1e/b03b1fe7-2eda-4505-a6f9-19c570b15d1e.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 931.124431] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca606010-30be-4ea7-b550-713c67167c44 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.133764] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 931.133764] env[61972]: value = "task-1389425" [ 931.133764] env[61972]: _type = "Task" [ 931.133764] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.141940] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389425, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.186412] env[61972]: DEBUG nova.compute.utils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 931.188768] env[61972]: DEBUG nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 931.188947] env[61972]: DEBUG nova.network.neutron [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 931.210551] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389421, 'name': PowerOffVM_Task, 'duration_secs': 0.22134} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.213648] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.215045] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4eaf131-9fdc-40cd-845f-dae8a62dc170 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.239122] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550f2aec-dcf4-4504-8d1f-8502b32ede4d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.262799] env[61972]: DEBUG nova.policy [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa1cef9829b45f4bbe90e9882b8f8c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57829399c5741c08c30bb60163148b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 931.304354] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.433749] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459720fa-f90d-4ff9-a144-9de19ef70872 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.445030] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbee2f5c-22f1-4070-9130-4e793b3549bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.480967] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0b9c3e-65db-4bcc-a26b-5c322fc34223 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.489484] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e7caf3-9276-4af6-98ab-b26a3b298aa1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.505041] env[61972]: DEBUG nova.compute.provider_tree [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.533182] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389423, 'name': PowerOffVM_Task, 'duration_secs': 0.200993} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.533182] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.533182] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b36cc59-1f2d-42a7-8c05-217916b96d61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.556151] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c067ecff-3f3f-479c-9e60-7b3fbbf71622 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.613497] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069361} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.615036] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.615036] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f32ffb-f922-4cd6-afab-8ad4550cf409 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.639261] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] d2864436-05a3-421f-98fd-41df925727c6/d2864436-05a3-421f-98fd-41df925727c6.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.643695] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fb2ca8f-88ef-4ec5-a20b-618a3e3ed0e3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.669712] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389425, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52673} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.671104] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] b03b1fe7-2eda-4505-a6f9-19c570b15d1e/b03b1fe7-2eda-4505-a6f9-19c570b15d1e.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.671394] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.671751] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 931.671751] env[61972]: value = "task-1389426" [ 931.671751] env[61972]: _type = "Task" [ 931.671751] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.672021] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68c391ef-e02a-4e1c-bb9c-61b829b892af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.682932] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389426, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.684598] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 931.684598] env[61972]: value = "task-1389427" [ 931.684598] env[61972]: _type = "Task" [ 931.684598] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.689271] env[61972]: DEBUG nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 931.700337] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389427, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.734689] env[61972]: DEBUG nova.network.neutron [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Successfully created port: afa659ca-956e-457f-8091-aa362b2ef1e3 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.749767] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Creating Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 931.750120] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c62c7ad0-39dc-4c89-a223-ecbc24f6f02a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.757675] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 931.757675] env[61972]: value = "task-1389428" [ 931.757675] env[61972]: _type = "Task" [ 931.757675] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.767207] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389428, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.839444] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30791fe5-2fea-4f41-8633-808797544085 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.862305] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73231446-c765-4b39-baa8-1f50103fef0d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.866924] env[61972]: DEBUG nova.network.neutron [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Updating instance_info_cache with network_info: [{"id": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "address": "fa:16:3e:a9:1d:15", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e9a7e3-a8", "ovs_interfaceid": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.873787] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance '84e07f61-2111-43cb-93a2-9cb47ac52503' progress to 83 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 931.997219] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.997813] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.997813] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 931.997952] env[61972]: DEBUG nova.compute.manager [None 
req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Rebuilding the list of instances to heal {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 932.008464] env[61972]: DEBUG nova.scheduler.client.report [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 932.067965] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Creating Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 932.068355] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6b8184e1-c784-4e09-b215-5717032cb6a5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.076987] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 932.076987] env[61972]: value = "task-1389429" [ 932.076987] env[61972]: _type = "Task" [ 932.076987] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.089236] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389429, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.184146] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389426, 'name': ReconfigVM_Task, 'duration_secs': 0.31675} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.184482] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Reconfigured VM instance instance-00000057 to attach disk [datastore1] d2864436-05a3-421f-98fd-41df925727c6/d2864436-05a3-421f-98fd-41df925727c6.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.185288] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9300ad85-9f19-467c-91cc-fe2b0f499da5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.198604] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389427, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068416} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.202037] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.202037] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 932.202037] env[61972]: value = "task-1389430" [ 932.202037] env[61972]: _type = "Task" [ 932.202037] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.202037] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26084975-0c84-4f7e-a5b3-afa66754d7ec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.224448] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389430, 'name': Rename_Task} progress is 14%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.234009] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] b03b1fe7-2eda-4505-a6f9-19c570b15d1e/b03b1fe7-2eda-4505-a6f9-19c570b15d1e.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.235096] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bda4185-cac3-4f91-a455-bcabf58d8414 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.255633] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 932.255633] env[61972]: value = "task-1389431" [ 932.255633] env[61972]: _type = "Task" [ 932.255633] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.267681] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389431, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.270740] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389428, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.380027] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.384239] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.384955] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1dbf462-9ada-48dc-8b83-c01ce8351e31 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.391615] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 932.391615] env[61972]: value = "task-1389432" [ 932.391615] env[61972]: _type = "Task" [ 932.391615] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.401011] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389432, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.505097] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: d2864436-05a3-421f-98fd-41df925727c6] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 932.505412] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 932.505597] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 65c02563-a348-4415-bb21-3d3711202838] Skipping network cache update for instance because it is Building. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 932.515424] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.836s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.516168] env[61972]: ERROR nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. 
[ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Traceback (most recent call last): [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self.driver.spawn(context, instance, image_meta, [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] vm_ref = self.build_virtual_machine(instance, [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] vif_infos = vmwarevif.get_vif_info(self._session, [ 932.516168] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] for vif in network_info: [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] return self._sync_wrapper(fn, *args, **kwargs) [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self.wait() [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self[:] = self._gt.wait() [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] return self._exit_event.wait() [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] result = hub.switch() [ 932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
932.516561] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] return self.greenlet.switch() [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] result = function(*args, **kwargs) [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] return func(*args, **kwargs) [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] raise e [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] nwinfo = self.network_api.allocate_for_instance( [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] created_port_ids = self._update_ports_for_instance( [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] with excutils.save_and_reraise_exception(): [ 932.516972] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] self.force_reraise() [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] raise self.value [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] updated_port = self._update_port( [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] _ensure_no_port_binding_failure(port) [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] raise exception.PortBindingFailed(port_id=port['id']) [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] nova.exception.PortBindingFailed: Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. [ 932.517374] env[61972]: ERROR nova.compute.manager [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] [ 932.517722] env[61972]: DEBUG nova.compute.utils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. {{(pid=61972) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 932.518189] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.846s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.518420] env[61972]: DEBUG nova.objects.instance [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lazy-loading 'resources' on Instance uuid b9726bf4-a4b1-4b22-840f-98157d0d790c {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.519576] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Build of instance a77d41aa-13ba-4d26-b5fd-4928891948ce was re-scheduled: Binding failed for port a827c64c-a00d-4d2d-af6e-29c34ca4b899, please check neutron logs for more information. 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 932.520030] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Unplugging VIFs for instance {{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 932.520272] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.520500] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.520684] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.534680] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.534828] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquired lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.534971] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Forcefully refreshing network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 932.535307] env[61972]: DEBUG nova.objects.instance [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lazy-loading 'info_cache' on Instance uuid e2b6dd4e-b639-4553-a45f-87c155506ea3 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.587941] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389429, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.665228] env[61972]: DEBUG nova.compute.manager [req-93ee5d12-2a55-4aa5-9954-632e3cf246cd req-8c5ff2a8-e119-4d82-89b6-cb2a68cb86bb service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Received event network-vif-unplugged-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 932.665361] env[61972]: DEBUG oslo_concurrency.lockutils [req-93ee5d12-2a55-4aa5-9954-632e3cf246cd req-8c5ff2a8-e119-4d82-89b6-cb2a68cb86bb service nova] Acquiring lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.665578] env[61972]: DEBUG oslo_concurrency.lockutils [req-93ee5d12-2a55-4aa5-9954-632e3cf246cd req-8c5ff2a8-e119-4d82-89b6-cb2a68cb86bb service nova] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.665806] env[61972]: DEBUG oslo_concurrency.lockutils [req-93ee5d12-2a55-4aa5-9954-632e3cf246cd req-8c5ff2a8-e119-4d82-89b6-cb2a68cb86bb service nova] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.665948] env[61972]: DEBUG nova.compute.manager [req-93ee5d12-2a55-4aa5-9954-632e3cf246cd req-8c5ff2a8-e119-4d82-89b6-cb2a68cb86bb service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] No waiting events found dispatching network-vif-unplugged-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 932.666126] env[61972]: WARNING nova.compute.manager [req-93ee5d12-2a55-4aa5-9954-632e3cf246cd req-8c5ff2a8-e119-4d82-89b6-cb2a68cb86bb service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Received unexpected event network-vif-unplugged-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb for instance with vm_state shelved and task_state shelving_offloading. 
[ 932.690779] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.691848] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84fbd92-65c2-4fb9-bb86-03c9c01b4eb4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.700080] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.701340] env[61972]: DEBUG nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 932.703310] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-718d4e99-5807-4662-ba76-2975d1d51400 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.713792] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389430, 'name': Rename_Task, 'duration_secs': 0.143464} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.714107] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.714359] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d895337d-5fad-4a22-90bb-0de02ba653cc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.720671] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 932.720671] env[61972]: value = "task-1389434" [ 932.720671] env[61972]: _type = "Task" [ 932.720671] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.727731] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 932.727990] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 932.728144] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.728337] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 932.728498] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.728656] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 932.728877] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 932.729067] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 932.729246] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 
tempest-ServersTestJSON-1214410209-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 932.729425] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 932.729610] env[61972]: DEBUG nova.virt.hardware [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 932.730445] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ace0790-df79-47f3-8818-55567bfd5f49 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.739638] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389434, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.742940] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85d3e1f-49eb-45d4-9e3d-082f66d98abd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.770019] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389431, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.776734] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389428, 'name': CreateSnapshot_Task, 'duration_secs': 0.978497} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.777151] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Created Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 932.778049] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c851fb-7305-432a-9c3e-1f943d73a044 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.793329] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 932.793642] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 932.793778] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleting the datastore file [datastore1] c274f675-f45e-49e7-8bf3-582a6977d95c {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 932.794033] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccb1b28d-740c-4bc3-ba84-1d64ce21cfe2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.800685] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 932.800685] env[61972]: value = "task-1389435" [ 932.800685] env[61972]: _type = "Task" [ 932.800685] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.809801] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389435, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.902683] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389432, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.054974] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 933.090641] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389429, 'name': CreateSnapshot_Task, 'duration_secs': 0.853336} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.090943] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Created Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 933.091815] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e25cf6-ec1a-40c1-8f44-be5c969a2965 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.170949] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.231159] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389434, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.248643] env[61972]: DEBUG nova.compute.manager [req-03e7d444-471d-4e1a-abad-1a1d1191f65c req-f1d58b72-a1e0-42d4-957f-81b49e789b44 service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] Received event network-vif-plugged-afa659ca-956e-457f-8091-aa362b2ef1e3 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 933.251293] env[61972]: DEBUG oslo_concurrency.lockutils [req-03e7d444-471d-4e1a-abad-1a1d1191f65c req-f1d58b72-a1e0-42d4-957f-81b49e789b44 service nova] Acquiring lock "65c02563-a348-4415-bb21-3d3711202838-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.251293] env[61972]: DEBUG oslo_concurrency.lockutils [req-03e7d444-471d-4e1a-abad-1a1d1191f65c req-f1d58b72-a1e0-42d4-957f-81b49e789b44 service nova] Lock "65c02563-a348-4415-bb21-3d3711202838-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.251293] env[61972]: DEBUG oslo_concurrency.lockutils [req-03e7d444-471d-4e1a-abad-1a1d1191f65c req-f1d58b72-a1e0-42d4-957f-81b49e789b44 service nova] Lock "65c02563-a348-4415-bb21-3d3711202838-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.251293] env[61972]: DEBUG nova.compute.manager [req-03e7d444-471d-4e1a-abad-1a1d1191f65c req-f1d58b72-a1e0-42d4-957f-81b49e789b44 service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] No waiting events found dispatching network-vif-plugged-afa659ca-956e-457f-8091-aa362b2ef1e3 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 933.251293] env[61972]: WARNING nova.compute.manager [req-03e7d444-471d-4e1a-abad-1a1d1191f65c req-f1d58b72-a1e0-42d4-957f-81b49e789b44 service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] Received unexpected event network-vif-plugged-afa659ca-956e-457f-8091-aa362b2ef1e3 for instance with vm_state building and task_state spawning. [ 933.259122] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37a3d02-952e-4dc5-b236-a178679dee61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.269585] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389431, 'name': ReconfigVM_Task, 'duration_secs': 0.531201} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.271495] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Reconfigured VM instance instance-00000058 to attach disk [datastore1] b03b1fe7-2eda-4505-a6f9-19c570b15d1e/b03b1fe7-2eda-4505-a6f9-19c570b15d1e.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.272450] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-706d805c-f31d-4822-8bf0-08499be8c7b0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.274532] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc96040-2938-4770-9073-bbde06f9312c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.313262] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Creating linked-clone VM from snapshot {{(pid=61972) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 933.315249] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0dc41013-b93d-4e39-9564-dca80da8688d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.321929] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7217d6e-2c0c-456f-aa65-459248408671 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.324624] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 933.324624] env[61972]: value = "task-1389436" [ 933.324624] env[61972]: _type = "Task" [ 933.324624] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.333977] env[61972]: DEBUG oslo_vmware.api [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203789} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.338411] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463f3cc5-4049-4d3b-b6d4-41ad9395995a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.342182] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.342182] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.342182] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.342964] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 933.342964] env[61972]: value = "task-1389437" [ 933.342964] env[61972]: _type = "Task" [ 933.342964] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.343727] env[61972]: DEBUG nova.network.neutron [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Successfully updated port: afa659ca-956e-457f-8091-aa362b2ef1e3 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.351631] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389436, 'name': Rename_Task} progress is 14%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.366443] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquiring lock "e8582450-36c2-4d6b-89ee-6fef324063c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.366868] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "e8582450-36c2-4d6b-89ee-6fef324063c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.370736] env[61972]: DEBUG nova.compute.provider_tree [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.370736] env[61972]: INFO nova.scheduler.client.report [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted allocations for instance c274f675-f45e-49e7-8bf3-582a6977d95c [ 933.378743] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 23%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.401947] env[61972]: DEBUG oslo_vmware.api [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389432, 'name': PowerOnVM_Task, 'duration_secs': 0.518839} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.402251] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.402475] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-9237d243-9e75-4c51-96a1-f7ebede64e9c tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance '84e07f61-2111-43cb-93a2-9cb47ac52503' progress to 100 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 933.612170] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Creating linked-clone VM from snapshot {{(pid=61972) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 933.612933] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-91d6e164-28f9-4581-b943-5b53ab069c52 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.622037] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 933.622037] env[61972]: value = "task-1389438" [ 933.622037] env[61972]: _type = "Task" [ 933.622037] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.630576] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389438, 'name': CloneVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.674300] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.674553] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61972) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 933.674741] env[61972]: DEBUG nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 933.674913] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.691095] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 933.731375] env[61972]: DEBUG oslo_vmware.api [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389434, 'name': PowerOnVM_Task, 'duration_secs': 0.509988} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.731968] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.732214] env[61972]: INFO nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Took 9.51 seconds to spawn the instance on the hypervisor. [ 933.732475] env[61972]: DEBUG nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 933.733345] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62239ece-4935-4ad9-a23b-150eb9137c87 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.836500] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389436, 'name': Rename_Task, 'duration_secs': 0.358076} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.836645] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.836941] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d095e159-2918-43d9-9c21-2c6b1281ce0e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.843504] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 933.843504] env[61972]: value = "task-1389439" [ 933.843504] env[61972]: _type = "Task" [ 933.843504] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.851829] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "refresh_cache-65c02563-a348-4415-bb21-3d3711202838" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.851971] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "refresh_cache-65c02563-a348-4415-bb21-3d3711202838" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.852138] env[61972]: DEBUG nova.network.neutron [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.854043] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389439, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.863796] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.872950] env[61972]: DEBUG nova.compute.manager [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 933.877366] env[61972]: DEBUG nova.scheduler.client.report [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 933.881620] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.131677] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389438, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.194410] env[61972]: DEBUG nova.network.neutron [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.256299] env[61972]: INFO nova.compute.manager [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Took 23.28 seconds to build instance. 
[ 934.260029] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Updating instance_info_cache with network_info: [{"id": "515acb28-dc72-4bb7-804b-4fb5de167f99", "address": "fa:16:3e:b1:34:de", "network": {"id": "3a3bad14-0acb-42a3-8bd9-5b6ea2eca868", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d3f24b94cb854f4b925e1be405c7df82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7836a5b-a91e-4d3f-8e96-afe024f62bb5", "external-id": "nsx-vlan-transportzone-419", "segmentation_id": 419, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap515acb28-dc", "ovs_interfaceid": "515acb28-dc72-4bb7-804b-4fb5de167f99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.354730] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389439, 'name': PowerOnVM_Task} progress is 93%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.370083] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.383370] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.389320] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.178s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.389571] env[61972]: DEBUG nova.objects.instance [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lazy-loading 'resources' on Instance uuid 12a1a1ee-9aa1-4dda-9276-68492718e404 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 934.391747] env[61972]: DEBUG nova.network.neutron [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.404485] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.411368] env[61972]: INFO nova.scheduler.client.report [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Deleted allocations for instance b9726bf4-a4b1-4b22-840f-98157d0d790c [ 934.557203] env[61972]: DEBUG nova.network.neutron [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Updating instance_info_cache with network_info: [{"id": "afa659ca-956e-457f-8091-aa362b2ef1e3", "address": "fa:16:3e:4c:51:f8", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": 
"nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafa659ca-95", "ovs_interfaceid": "afa659ca-956e-457f-8091-aa362b2ef1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.633467] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389438, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.698342] env[61972]: INFO nova.compute.manager [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Took 1.02 seconds to deallocate network for instance. [ 934.745159] env[61972]: DEBUG nova.compute.manager [req-ddde65f4-8a7a-4889-8f12-4779eecf7d0d req-d2055839-6100-47bb-8b53-a747b504b15a service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Received event network-changed-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 934.745159] env[61972]: DEBUG nova.compute.manager [req-ddde65f4-8a7a-4889-8f12-4779eecf7d0d req-d2055839-6100-47bb-8b53-a747b504b15a service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Refreshing instance network info cache due to event network-changed-c8e9a7e3-a835-49e7-a4a5-9b864104e5fb. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 934.745159] env[61972]: DEBUG oslo_concurrency.lockutils [req-ddde65f4-8a7a-4889-8f12-4779eecf7d0d req-d2055839-6100-47bb-8b53-a747b504b15a service nova] Acquiring lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.748014] env[61972]: DEBUG oslo_concurrency.lockutils [req-ddde65f4-8a7a-4889-8f12-4779eecf7d0d req-d2055839-6100-47bb-8b53-a747b504b15a service nova] Acquired lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.748014] env[61972]: DEBUG nova.network.neutron [req-ddde65f4-8a7a-4889-8f12-4779eecf7d0d req-d2055839-6100-47bb-8b53-a747b504b15a service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Refreshing network info cache for port c8e9a7e3-a835-49e7-a4a5-9b864104e5fb {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 934.759695] env[61972]: DEBUG oslo_concurrency.lockutils [None req-49f1a76a-bfcf-4444-a3f0-242769b8abc2 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.791s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.762329] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Releasing lock "refresh_cache-e2b6dd4e-b639-4553-a45f-87c155506ea3" {{(pid=61972) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.762596] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Updated the network info_cache for instance {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 934.762794] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.763085] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.763312] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.763510] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.764035] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.764552] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.764698] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 934.764865] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.853402] env[61972]: DEBUG oslo_vmware.api [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389439, 'name': PowerOnVM_Task, 'duration_secs': 0.736529} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.853598] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.853854] env[61972]: INFO nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Took 8.21 seconds to spawn the instance on the hypervisor. [ 934.854130] env[61972]: DEBUG nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 934.855046] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2957b480-3d96-464f-a163-56df271b6689 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.865928] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.930988] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f8bb3ed3-ec96-4d3d-a6ba-77f615f7c6fa tempest-ServersTestFqdnHostnames-1802778164 tempest-ServersTestFqdnHostnames-1802778164-project-member] Lock "b9726bf4-a4b1-4b22-840f-98157d0d790c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.448s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.062206] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "refresh_cache-65c02563-a348-4415-bb21-3d3711202838" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.062591] env[61972]: DEBUG nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Instance network_info: |[{"id": "afa659ca-956e-457f-8091-aa362b2ef1e3", "address": "fa:16:3e:4c:51:f8", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafa659ca-95", "ovs_interfaceid": "afa659ca-956e-457f-8091-aa362b2ef1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 935.063155] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:51:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'afa659ca-956e-457f-8091-aa362b2ef1e3', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.072151] env[61972]: DEBUG oslo.service.loopingcall [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.075663] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65c02563-a348-4415-bb21-3d3711202838] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 935.077038] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d725e46-57d0-49af-832a-65688536be26 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.100625] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.100625] env[61972]: value = "task-1389440" [ 935.100625] env[61972]: _type = "Task" [ 935.100625] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.109966] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389440, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.134141] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389438, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.136562] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44069fca-f376-4462-89e5-ecd623490384 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.144605] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a30ccad-2819-43c6-b1ce-7c4c28d8816b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.197333] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5656612-0932-4497-abd2-2b7c58b38920 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.220662] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442e422-0dac-49ef-9c12-6b99c3d75d94 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.241850] env[61972]: DEBUG nova.compute.provider_tree [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.267991] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.321068] env[61972]: DEBUG nova.compute.manager [req-9d842797-ef98-470c-900c-ef4c292b92fd req-9bbfb463-beb6-4e7d-b70a-c093b04b88ed service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] Received event network-changed-afa659ca-956e-457f-8091-aa362b2ef1e3 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 935.321317] env[61972]: DEBUG nova.compute.manager [req-9d842797-ef98-470c-900c-ef4c292b92fd req-9bbfb463-beb6-4e7d-b70a-c093b04b88ed service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] Refreshing instance network info cache due to event network-changed-afa659ca-956e-457f-8091-aa362b2ef1e3. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 935.321533] env[61972]: DEBUG oslo_concurrency.lockutils [req-9d842797-ef98-470c-900c-ef4c292b92fd req-9bbfb463-beb6-4e7d-b70a-c093b04b88ed service nova] Acquiring lock "refresh_cache-65c02563-a348-4415-bb21-3d3711202838" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.321624] env[61972]: DEBUG oslo_concurrency.lockutils [req-9d842797-ef98-470c-900c-ef4c292b92fd req-9bbfb463-beb6-4e7d-b70a-c093b04b88ed service nova] Acquired lock "refresh_cache-65c02563-a348-4415-bb21-3d3711202838" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.322280] env[61972]: DEBUG nova.network.neutron [req-9d842797-ef98-470c-900c-ef4c292b92fd req-9bbfb463-beb6-4e7d-b70a-c093b04b88ed service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] Refreshing network info cache for port afa659ca-956e-457f-8091-aa362b2ef1e3 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.365020] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.379631] env[61972]: INFO nova.compute.manager [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Took 21.38 seconds to build instance. [ 935.436241] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "c274f675-f45e-49e7-8bf3-582a6977d95c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.550030] env[61972]: DEBUG nova.network.neutron [req-ddde65f4-8a7a-4889-8f12-4779eecf7d0d req-d2055839-6100-47bb-8b53-a747b504b15a service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Updated VIF entry in instance network info cache for port c8e9a7e3-a835-49e7-a4a5-9b864104e5fb. 
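Editor's note: the "Received event network-changed-..." records above show the compute manager taking a per-instance "refresh_cache-<uuid>" lock before refreshing that instance's network info cache. Below is a small sketch of that serialize-per-instance pattern using plain threading locks; it is illustrative only and is not the oslo.concurrency or Nova code.

import threading
from collections import defaultdict

# One lock per instance UUID, mirroring the "refresh_cache-<uuid>" lock names in the log.
_cache_locks = defaultdict(threading.Lock)
_network_info_cache = {}

def handle_network_changed(instance_uuid, port_id, fetch_network_info):
    """Serialize cache refreshes for one instance while other instances proceed."""
    with _cache_locks[instance_uuid]:
        # fetch_network_info stands in for the Neutron port query the log shows.
        _network_info_cache[instance_uuid] = fetch_network_info(port_id)
        return _network_info_cache[instance_uuid]

if __name__ == "__main__":
    info = handle_network_changed(
        "65c02563-a348-4415-bb21-3d3711202838",
        "afa659ca-956e-457f-8091-aa362b2ef1e3",
        lambda port: {"port_id": port, "active": True},
    )
    print(info)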
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 935.550525] env[61972]: DEBUG nova.network.neutron [req-ddde65f4-8a7a-4889-8f12-4779eecf7d0d req-d2055839-6100-47bb-8b53-a747b504b15a service nova] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Updating instance_info_cache with network_info: [{"id": "c8e9a7e3-a835-49e7-a4a5-9b864104e5fb", "address": "fa:16:3e:a9:1d:15", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": null, "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc8e9a7e3-a8", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.614293] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389440, 'name': CreateVM_Task, 'duration_secs': 0.418775} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.614293] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65c02563-a348-4415-bb21-3d3711202838] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 935.614557] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.614775] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.615144] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 935.615424] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e4e3f44-d1a5-4d1a-ad3d-f121c4d4026b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.620765] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 935.620765] env[61972]: value = 
"session[52a9d73d-5959-3000-f45d-05308a20e7d5]522ad9a4-28d5-53f8-fce6-146a11c8b34c" [ 935.620765] env[61972]: _type = "Task" [ 935.620765] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.635286] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]522ad9a4-28d5-53f8-fce6-146a11c8b34c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.639386] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389438, 'name': CloneVM_Task, 'duration_secs': 1.641662} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.639915] env[61972]: INFO nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Created linked-clone VM from snapshot [ 935.640804] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379895fd-2256-4c67-a8b0-5133332cb42f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.649141] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Uploading image f2b2602a-d38e-4ffb-b305-ed7666354ac0 {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 935.680982] env[61972]: DEBUG oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 935.680982] env[61972]: value = "vm-294894" [ 935.680982] env[61972]: _type = "VirtualMachine" [ 935.680982] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 935.681706] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b1f628a1-651e-4fdc-84d9-8a8f81f704ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.688437] env[61972]: DEBUG oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lease: (returnval){ [ 935.688437] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52669aa2-953c-6b1b-4ba8-927d7194b879" [ 935.688437] env[61972]: _type = "HttpNfcLease" [ 935.688437] env[61972]: } obtained for exporting VM: (result){ [ 935.688437] env[61972]: value = "vm-294894" [ 935.688437] env[61972]: _type = "VirtualMachine" [ 935.688437] env[61972]: }. 
{{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 935.688757] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the lease: (returnval){ [ 935.688757] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52669aa2-953c-6b1b-4ba8-927d7194b879" [ 935.688757] env[61972]: _type = "HttpNfcLease" [ 935.688757] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 935.694991] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 935.694991] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52669aa2-953c-6b1b-4ba8-927d7194b879" [ 935.694991] env[61972]: _type = "HttpNfcLease" [ 935.694991] env[61972]: } is initializing. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 935.738559] env[61972]: INFO nova.scheduler.client.report [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Deleted allocations for instance a77d41aa-13ba-4d26-b5fd-4928891948ce [ 935.747230] env[61972]: DEBUG nova.scheduler.client.report [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 935.780775] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "84e07f61-2111-43cb-93a2-9cb47ac52503" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.783297] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.783297] env[61972]: DEBUG nova.compute.manager [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Going to confirm migration 1 {{(pid=61972) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 935.868371] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 
tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.882862] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f5e5c0e5-92b0-40b6-8e75-9d106e50615b tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.899s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.054116] env[61972]: DEBUG oslo_concurrency.lockutils [req-ddde65f4-8a7a-4889-8f12-4779eecf7d0d req-d2055839-6100-47bb-8b53-a747b504b15a service nova] Releasing lock "refresh_cache-c274f675-f45e-49e7-8bf3-582a6977d95c" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.065147] env[61972]: DEBUG nova.network.neutron [req-9d842797-ef98-470c-900c-ef4c292b92fd req-9bbfb463-beb6-4e7d-b70a-c093b04b88ed service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] Updated VIF entry in instance network info cache for port afa659ca-956e-457f-8091-aa362b2ef1e3. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 936.065620] env[61972]: DEBUG nova.network.neutron [req-9d842797-ef98-470c-900c-ef4c292b92fd req-9bbfb463-beb6-4e7d-b70a-c093b04b88ed service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] Updating instance_info_cache with network_info: [{"id": "afa659ca-956e-457f-8091-aa362b2ef1e3", "address": "fa:16:3e:4c:51:f8", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafa659ca-95", "ovs_interfaceid": "afa659ca-956e-457f-8091-aa362b2ef1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.133593] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]522ad9a4-28d5-53f8-fce6-146a11c8b34c, 'name': SearchDatastore_Task, 'duration_secs': 0.025894} completed successfully. 
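Editor's note: the instance_info_cache payloads logged above are JSON lists of VIF dictionaries. The sketch below pulls out the commonly needed fields (MAC, fixed IPs, MTU, NSX segmentation ID) from a trimmed copy of that structure; the field names and example values are taken from the log lines, the helper itself is illustrative.

import json

# Trimmed-down copy of the structure logged by update_instance_cache_with_nw_info.
NETWORK_INFO_JSON = """
[{"id": "afa659ca-956e-457f-8091-aa362b2ef1e3",
  "address": "fa:16:3e:4c:51:f8",
  "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.10", "type": "fixed"}]}],
              "meta": {"mtu": 8950}},
  "details": {"nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0",
              "segmentation_id": 661},
  "active": true}]
"""

def summarize_vifs(network_info):
    """Yield a flat summary per VIF entry in a network_info list."""
    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        yield {
            "port_id": vif["id"],
            "mac": vif["address"],
            "fixed_ips": ips,
            "mtu": vif["network"]["meta"]["mtu"],
            "segmentation_id": vif["details"].get("segmentation_id"),
        }

if __name__ == "__main__":
    for summary in summarize_vifs(json.loads(NETWORK_INFO_JSON)):
        print(summary)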
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.133917] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.134178] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 936.134441] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.134840] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.135049] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 936.135353] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80c9258f-b7a3-442d-ab13-e0fe44582ff7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.146626] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 936.146880] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 936.147746] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d362b1f7-008b-48bd-9147-1ad01587b8af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.153018] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 936.153018] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d1b11c-501e-df0e-020d-eaad52bb292b" [ 936.153018] env[61972]: _type = "Task" [ 936.153018] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.162948] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d1b11c-501e-df0e-020d-eaad52bb292b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.196237] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 936.196237] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52669aa2-953c-6b1b-4ba8-927d7194b879" [ 936.196237] env[61972]: _type = "HttpNfcLease" [ 936.196237] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 936.196550] env[61972]: DEBUG oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 936.196550] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52669aa2-953c-6b1b-4ba8-927d7194b879" [ 936.196550] env[61972]: _type = "HttpNfcLease" [ 936.196550] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 936.197326] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf2f1c5-1ef1-4e4a-a07e-ff0cf1a6e794 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.205250] env[61972]: DEBUG oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ce3db4-6bc6-4c22-a5cc-d504c04de3d0/disk-0.vmdk from lease info. {{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 936.205428] env[61972]: DEBUG oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ce3db4-6bc6-4c22-a5cc-d504c04de3d0/disk-0.vmdk for reading. 
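Editor's note: the export path above creates an HttpNfcLease, polls it until it leaves the "initializing" state, and then opens the VMDK URL found in the lease info for reading. The sketch below mirrors only that sequence; query_lease_state and query_vmdk_url are hypothetical stand-ins for the vSphere calls in the log, not the oslo.vmware rw_handles code.

import time

# Simulated lease states; the log shows "initializing" followed by "ready".
_LEASE_STATES = iter(["initializing", "initializing", "ready"])

def query_lease_state(lease_id):
    """Hypothetical helper returning the current HttpNfcLease state."""
    return next(_LEASE_STATES)

def query_vmdk_url(lease_id):
    # The log shows the URL coming from the lease info on an ESX host.
    return "https://esx-host.example/nfc/%s/disk-0.vmdk" % lease_id

def wait_for_lease_ready(lease_id, poll_interval=0.2, timeout=30.0):
    """Poll the lease until it is ready, then return the export URL."""
    deadline = time.monotonic() + timeout
    while True:
        state = query_lease_state(lease_id)
        if state == "ready":
            return query_vmdk_url(lease_id)
        if state == "error":
            raise RuntimeError("lease %s failed" % lease_id)
        if time.monotonic() > deadline:
            raise TimeoutError("lease %s still %s after %ss" % (lease_id, state, timeout))
        time.sleep(poll_interval)

if __name__ == "__main__":
    print("Reading export from", wait_for_lease_ready("52669aa2"))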
{{(pid=61972) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 936.280062] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.282356] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f5be6a1-b880-4401-bd41-6ba31a0eb199 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "a77d41aa-13ba-4d26-b5fd-4928891948ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.827s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.284126] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.058s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.284379] env[61972]: DEBUG nova.objects.instance [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lazy-loading 'resources' on Instance uuid 94bd64b9-3d20-4631-baed-4500f9beb9c2 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.285620] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "a77d41aa-13ba-4d26-b5fd-4928891948ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 23.017s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.285848] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "a77d41aa-13ba-4d26-b5fd-4928891948ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.286996] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "a77d41aa-13ba-4d26-b5fd-4928891948ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.287209] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "a77d41aa-13ba-4d26-b5fd-4928891948ce-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.296109] env[61972]: INFO nova.compute.manager [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Terminating instance [ 936.313421] env[61972]: INFO nova.scheduler.client.report [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Deleted allocations for instance 12a1a1ee-9aa1-4dda-9276-68492718e404 [ 936.366402] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.384881] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a28c8cc3-84de-4fda-9567-e9d8edba3b93 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.420054] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.420268] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.420469] env[61972]: DEBUG nova.network.neutron [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.420636] env[61972]: DEBUG nova.objects.instance [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lazy-loading 'info_cache' on Instance uuid 84e07f61-2111-43cb-93a2-9cb47ac52503 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.568083] env[61972]: DEBUG oslo_concurrency.lockutils [req-9d842797-ef98-470c-900c-ef4c292b92fd req-9bbfb463-beb6-4e7d-b70a-c093b04b88ed service nova] Releasing lock "refresh_cache-65c02563-a348-4415-bb21-3d3711202838" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.664146] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d1b11c-501e-df0e-020d-eaad52bb292b, 'name': SearchDatastore_Task, 'duration_secs': 
0.016831} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.664997] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5a7c503-162e-4c22-9532-cf8fe1f5728d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.670380] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 936.670380] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52011f32-7ebe-e26c-bd5e-41c3bf47c77b" [ 936.670380] env[61972]: _type = "Task" [ 936.670380] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.678243] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52011f32-7ebe-e26c-bd5e-41c3bf47c77b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.802086] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.802527] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.802724] env[61972]: DEBUG nova.network.neutron [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.824927] env[61972]: DEBUG oslo_concurrency.lockutils [None req-cae61409-1a11-4a36-98e9-ff4ba37637dc tempest-ServersNegativeTestMultiTenantJSON-133049993 tempest-ServersNegativeTestMultiTenantJSON-133049993-project-member] Lock "12a1a1ee-9aa1-4dda-9276-68492718e404" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.455s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.877484] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 100%. 
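Editor's note: most of the lockutils lines in this stretch report two durations, how long the caller waited to acquire a named lock and how long it held it. The context manager below produces the same two numbers around a plain threading.Lock; it is a minimal illustration of what those log lines measure, not the oslo.concurrency implementation.

import contextlib
import threading
import time

_locks = {}
_locks_guard = threading.Lock()

def _named_lock(name):
    """Return the process-wide lock object for a lock name."""
    with _locks_guard:
        return _locks.setdefault(name, threading.Lock())

@contextlib.contextmanager
def timed_lock(name, by):
    """Emit waited/held timings like the 'Lock "..." acquired by "..."' records."""
    lock = _named_lock(name)
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

if __name__ == "__main__":
    with timed_lock("compute_resources", "update_usage"):
        time.sleep(0.05)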
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.976592] env[61972]: DEBUG nova.compute.manager [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 936.977697] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211db528-f6a2-419d-88bd-5e2f19c093f2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.029637] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389cb642-65b8-47d4-ac78-2bc22563ed7a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.040932] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd7798c-deb5-45fa-a603-22f2c3bbabd6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.082338] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b26ec5d-5033-4713-b517-9a9e27a610a5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.090815] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ed1c74-b966-4c8d-95b4-18470c21077f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.108035] env[61972]: DEBUG nova.compute.provider_tree [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.181512] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52011f32-7ebe-e26c-bd5e-41c3bf47c77b, 'name': SearchDatastore_Task, 'duration_secs': 0.013564} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.181935] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.182492] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 65c02563-a348-4415-bb21-3d3711202838/65c02563-a348-4415-bb21-3d3711202838.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 937.182714] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f446a6e7-d1c9-4a96-bd09-d30043205fa6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.190487] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 937.190487] env[61972]: value = "task-1389442" [ 937.190487] env[61972]: _type = "Task" [ 937.190487] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.199349] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.330226] env[61972]: DEBUG nova.network.neutron [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.359616] env[61972]: DEBUG nova.compute.manager [req-8a064635-62bd-449d-b85e-20a2d3fcfb73 req-5a4ff83a-d777-4e99-92e3-c326578e1c67 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Received event network-changed-98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 937.359933] env[61972]: DEBUG nova.compute.manager [req-8a064635-62bd-449d-b85e-20a2d3fcfb73 req-5a4ff83a-d777-4e99-92e3-c326578e1c67 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Refreshing instance network info cache due to event network-changed-98807bc5-c5af-4bd9-ad5e-8c3043878d76. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 937.360312] env[61972]: DEBUG oslo_concurrency.lockutils [req-8a064635-62bd-449d-b85e-20a2d3fcfb73 req-5a4ff83a-d777-4e99-92e3-c326578e1c67 service nova] Acquiring lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.360679] env[61972]: DEBUG oslo_concurrency.lockutils [req-8a064635-62bd-449d-b85e-20a2d3fcfb73 req-5a4ff83a-d777-4e99-92e3-c326578e1c67 service nova] Acquired lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.360904] env[61972]: DEBUG nova.network.neutron [req-8a064635-62bd-449d-b85e-20a2d3fcfb73 req-5a4ff83a-d777-4e99-92e3-c326578e1c67 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Refreshing network info cache for port 98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 937.372346] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.430371] env[61972]: DEBUG nova.network.neutron [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.494643] env[61972]: INFO nova.compute.manager [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] instance snapshotting [ 937.505019] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f138ee-903f-4612-8974-09f303aa95e6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.533976] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7193dd5-1f7e-4590-94c6-3b21458684cc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.613161] env[61972]: DEBUG nova.scheduler.client.report [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 937.704560] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 
tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389442, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.818394] env[61972]: DEBUG nova.network.neutron [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance_info_cache with network_info: [{"id": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "address": "fa:16:3e:c8:26:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd99d79a-cd", "ovs_interfaceid": "dd99d79a-cd6c-477b-88f4-45e9d019f331", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.881021] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.933625] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "refresh_cache-a77d41aa-13ba-4d26-b5fd-4928891948ce" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.934071] env[61972]: DEBUG nova.compute.manager [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Start destroying the instance on the hypervisor. 
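Editor's note: the "Inventory has not changed for provider ... based on inventory data" records earlier in this stretch carry per-resource-class dictionaries (total, reserved, min_unit, max_unit, step_size, allocation_ratio). As a rough illustration of how such a payload is usually read, the sketch below computes an effective capacity of (total - reserved) * allocation_ratio per class; the numbers are copied from the log, and the helper is not Nova or Placement code.

# Inventory payload as logged for provider 2f34b92c-91e8-4983-ae34-7426fcec3157.
INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                  "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 175,
                "step_size": 1, "allocation_ratio": 1.0},
}

def effective_capacity(inventory):
    """Capacity the scheduler can place against, per resource class."""
    return {
        rc: (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        for rc, inv in inventory.items()
    }

if __name__ == "__main__":
    for rc, cap in effective_capacity(INVENTORY).items():
        print(f"{rc}: {cap:g}")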
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 937.934881] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.934881] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9008cd9-0389-4467-8ede-a133a45bf4a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.948812] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0734b805-e85b-4195-b181-e19f68b74a7d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.985097] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a77d41aa-13ba-4d26-b5fd-4928891948ce could not be found. [ 937.985473] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.985740] env[61972]: INFO nova.compute.manager [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Took 0.05 seconds to destroy the instance on the hypervisor. [ 937.986081] env[61972]: DEBUG oslo.service.loopingcall [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.986383] env[61972]: DEBUG nova.compute.manager [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 937.986481] env[61972]: DEBUG nova.network.neutron [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.025578] env[61972]: DEBUG nova.network.neutron [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Instance cache missing network info. 
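Editor's note: the destroy path above logs a WARNING that the backend VM no longer exists (nova.exception.InstanceNotFound) and then proceeds as if the destroy succeeded. Below is a minimal sketch of that tolerate-missing-backend pattern, with a local InstanceNotFound class standing in for the Nova exception and a simulated backend lookup; it is not the Nova destroy code.

class InstanceNotFound(Exception):
    """Local stand-in for nova.exception.InstanceNotFound."""

def destroy_on_backend(instance_uuid):
    # Simulate the backend lookup (SearchIndex.FindAllByUuid) coming back empty.
    raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")

def destroy(instance_uuid):
    """Treat a missing backend VM as already destroyed, as the log does."""
    try:
        destroy_on_backend(instance_uuid)
    except InstanceNotFound as exc:
        print(f"WARNING: Instance does not exist on backend: {exc}")
    print(f"Instance {instance_uuid} destroyed")

if __name__ == "__main__":
    destroy("a77d41aa-13ba-4d26-b5fd-4928891948ce")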
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.054963] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Creating Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 938.059280] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-718c43dd-75ff-476b-a4d1-8dcec112ce19 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.068135] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 938.068135] env[61972]: value = "task-1389443" [ 938.068135] env[61972]: _type = "Task" [ 938.068135] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.077691] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389443, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.123766] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.130048] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.246s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.130048] env[61972]: DEBUG nova.objects.instance [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'resources' on Instance uuid c274f675-f45e-49e7-8bf3-582a6977d95c {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.153745] env[61972]: INFO nova.scheduler.client.report [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Deleted allocations for instance 94bd64b9-3d20-4631-baed-4500f9beb9c2 [ 938.206073] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678561} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.206535] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 65c02563-a348-4415-bb21-3d3711202838/65c02563-a348-4415-bb21-3d3711202838.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 938.206831] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 938.207800] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bcdf95c-326a-4f00-a17b-08c61c45f645 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.218543] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 938.218543] env[61972]: value = "task-1389444" [ 938.218543] env[61972]: _type = "Task" [ 938.218543] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.229314] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389444, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.271639] env[61972]: DEBUG nova.network.neutron [req-8a064635-62bd-449d-b85e-20a2d3fcfb73 req-5a4ff83a-d777-4e99-92e3-c326578e1c67 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updated VIF entry in instance network info cache for port 98807bc5-c5af-4bd9-ad5e-8c3043878d76. 
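Editor's note: the disk preparation steps above copy the cached image VMDK to a per-instance path and then extend the root disk before the VM is reconfigured to attach it. The sketch below only shows how those "[datastore2] ..." datastore paths are composed and the order of the three steps; the task submissions are reduced to prints, and the size unit (assumed KB here, matching the 1048576 figure in the log) is an assumption.

def ds_path(datastore, *parts):
    """Build a '[datastore] a/b/c' style path like the ones in the log."""
    return "[%s] %s" % (datastore, "/".join(parts))

def prepare_root_disk(datastore, image_id, instance_uuid, size_kb):
    """Copy the cached image to the instance directory, extend it, attach it."""
    cached = ds_path(datastore, "devstack-image-cache_base", image_id, image_id + ".vmdk")
    target = ds_path(datastore, instance_uuid, instance_uuid + ".vmdk")
    print("CopyVirtualDisk_Task:", cached, "->", target)
    print("ExtendVirtualDisk_Task:", target, "to", size_kb, "KB (assumed unit)")
    print("ReconfigVM_Task: attach", target)
    return target

if __name__ == "__main__":
    prepare_root_disk("datastore2",
                      "79227ea9-188c-426d-a7d8-cb14b658f493",
                      "65c02563-a348-4415-bb21-3d3711202838",
                      1048576)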
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 938.272032] env[61972]: DEBUG nova.network.neutron [req-8a064635-62bd-449d-b85e-20a2d3fcfb73 req-5a4ff83a-d777-4e99-92e3-c326578e1c67 service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating instance_info_cache with network_info: [{"id": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "address": "fa:16:3e:5b:7e:e2", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98807bc5-c5", "ovs_interfaceid": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.323836] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-84e07f61-2111-43cb-93a2-9cb47ac52503" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.323836] env[61972]: DEBUG nova.objects.instance [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lazy-loading 'migration_context' on Instance uuid 84e07f61-2111-43cb-93a2-9cb47ac52503 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.375264] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389437, 'name': CloneVM_Task, 'duration_secs': 4.896293} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.375558] env[61972]: INFO nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Created linked-clone VM from snapshot [ 938.376340] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c5fd91-eaec-4524-b3be-9b9075ed5452 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.383940] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Uploading image 5d728988-1ea5-4476-a502-ce99a53302c1 {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 938.421384] env[61972]: DEBUG oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 938.421384] env[61972]: value = "vm-294893" [ 938.421384] env[61972]: _type = "VirtualMachine" [ 938.421384] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 938.421702] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f78678f9-95f1-437b-8c10-e47c40908aaf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.428791] env[61972]: DEBUG oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lease: (returnval){ [ 938.428791] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f431c3-c868-3271-ae3a-a5ecc7549fd5" [ 938.428791] env[61972]: _type = "HttpNfcLease" [ 938.428791] env[61972]: } obtained for exporting VM: (result){ [ 938.428791] env[61972]: value = "vm-294893" [ 938.428791] env[61972]: _type = "VirtualMachine" [ 938.428791] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 938.429167] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the lease: (returnval){ [ 938.429167] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f431c3-c868-3271-ae3a-a5ecc7549fd5" [ 938.429167] env[61972]: _type = "HttpNfcLease" [ 938.429167] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 938.436151] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 938.436151] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f431c3-c868-3271-ae3a-a5ecc7549fd5" [ 938.436151] env[61972]: _type = "HttpNfcLease" [ 938.436151] env[61972]: } is initializing. 
{{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 938.527616] env[61972]: DEBUG nova.network.neutron [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.578771] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389443, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.632780] env[61972]: DEBUG nova.objects.instance [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'numa_topology' on Instance uuid c274f675-f45e-49e7-8bf3-582a6977d95c {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.661698] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ab9b7e48-2b1d-4187-8746-44d61f1e3a92 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "94bd64b9-3d20-4631-baed-4500f9beb9c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.869s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.729252] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389444, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073108} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.729590] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 938.730532] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b477e8-8b82-4cc5-b029-ab54b68e4f8b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.755321] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 65c02563-a348-4415-bb21-3d3711202838/65c02563-a348-4415-bb21-3d3711202838.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 938.755798] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34bf3b37-fefe-4d80-a9ff-da9f8a346bba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.774765] env[61972]: DEBUG oslo_concurrency.lockutils [req-8a064635-62bd-449d-b85e-20a2d3fcfb73 req-5a4ff83a-d777-4e99-92e3-c326578e1c67 service nova] Releasing lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.782264] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 938.782264] env[61972]: value = "task-1389446" [ 938.782264] env[61972]: _type = "Task" [ 938.782264] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.791741] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389446, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.829796] env[61972]: DEBUG nova.objects.base [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Object Instance<84e07f61-2111-43cb-93a2-9cb47ac52503> lazy-loaded attributes: info_cache,migration_context {{(pid=61972) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 938.830881] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a65c913-a611-4bea-90b5-32fb6289e665 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.852348] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d234924-0564-4f01-aab6-1497775b832e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.858301] env[61972]: DEBUG oslo_vmware.api [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 938.858301] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52648150-8879-574c-c2c1-9210515c8ad4" [ 938.858301] env[61972]: _type = "Task" [ 938.858301] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.867845] env[61972]: DEBUG oslo_vmware.api [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52648150-8879-574c-c2c1-9210515c8ad4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.939637] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 938.939637] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f431c3-c868-3271-ae3a-a5ecc7549fd5" [ 938.939637] env[61972]: _type = "HttpNfcLease" [ 938.939637] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 938.939974] env[61972]: DEBUG oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 938.939974] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52f431c3-c868-3271-ae3a-a5ecc7549fd5" [ 938.939974] env[61972]: _type = "HttpNfcLease" [ 938.939974] env[61972]: }. {{(pid=61972) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 938.940650] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c02131b-78d4-4615-901d-73b3b1721ede {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.949169] env[61972]: DEBUG oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521759a0-e12a-c94f-ac09-418c21e5615d/disk-0.vmdk from lease info. 
{{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 938.949392] env[61972]: DEBUG oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521759a0-e12a-c94f-ac09-418c21e5615d/disk-0.vmdk for reading. {{(pid=61972) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 939.031816] env[61972]: INFO nova.compute.manager [-] [instance: a77d41aa-13ba-4d26-b5fd-4928891948ce] Took 1.05 seconds to deallocate network for instance. [ 939.078870] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389443, 'name': CreateSnapshot_Task, 'duration_secs': 0.924196} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.079214] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Created Snapshot of the VM instance {{(pid=61972) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 939.080823] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11c69b4-afc3-42f0-a302-39021706c197 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.096657] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-130179c6-7eb5-4fb9-b2cb-13ab63dc53da {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.138234] env[61972]: DEBUG nova.objects.base [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61972) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 939.296125] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389446, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.343489] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef93732d-bebb-4ea3-8961-1edd0d6bf2e1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.351013] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa13489-e2d5-42a8-886c-6c32f2eff54a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.385240] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4091bca2-5a75-45be-bb4e-5296e1f5de02 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.396102] env[61972]: DEBUG oslo_vmware.api [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52648150-8879-574c-c2c1-9210515c8ad4, 'name': SearchDatastore_Task, 'duration_secs': 0.00957} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.399632] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.401388] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a5a537-1c4c-4e3b-b90e-11b7aa3a9340 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.420285] env[61972]: DEBUG nova.compute.provider_tree [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.603335] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Creating linked-clone VM from snapshot {{(pid=61972) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 939.603882] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9ec2508a-ba3f-489c-8407-976f81abc653 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.613687] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 939.613687] env[61972]: value = "task-1389447" [ 939.613687] env[61972]: _type = "Task" [ 939.613687] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.623112] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389447, 'name': CloneVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.793748] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389446, 'name': ReconfigVM_Task, 'duration_secs': 0.55523} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.794157] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 65c02563-a348-4415-bb21-3d3711202838/65c02563-a348-4415-bb21-3d3711202838.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 939.795290] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7038e2db-e9ce-4596-9b9f-6ace53e58b8e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.801741] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 939.801741] env[61972]: value = "task-1389448" [ 939.801741] env[61972]: _type = "Task" [ 939.801741] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.811258] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389448, 'name': Rename_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.930054] env[61972]: DEBUG nova.scheduler.client.report [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 940.066055] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5668790d-9d2b-4df4-9f69-667256b1eefa tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "a77d41aa-13ba-4d26-b5fd-4928891948ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.780s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.128213] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389447, 'name': CloneVM_Task} progress is 94%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.190312] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.195650] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.240152] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "e2b6dd4e-b639-4553-a45f-87c155506ea3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.240527] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "e2b6dd4e-b639-4553-a45f-87c155506ea3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.240764] 
env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "e2b6dd4e-b639-4553-a45f-87c155506ea3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.241016] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "e2b6dd4e-b639-4553-a45f-87c155506ea3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.241325] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "e2b6dd4e-b639-4553-a45f-87c155506ea3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.250763] env[61972]: INFO nova.compute.manager [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Terminating instance [ 940.313662] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389448, 'name': Rename_Task, 'duration_secs': 0.204196} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.314162] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 940.314534] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-338bf694-ecb4-4a5d-a4ad-a6cc20619567 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.321846] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 940.321846] env[61972]: value = "task-1389449" [ 940.321846] env[61972]: _type = "Task" [ 940.321846] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.335567] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389449, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.437087] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.309s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.440917] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.036s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.444843] env[61972]: INFO nova.compute.claims [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.630180] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389447, 'name': CloneVM_Task} progress is 95%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.699856] env[61972]: DEBUG nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 940.756505] env[61972]: DEBUG nova.compute.manager [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 940.756891] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.758222] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b35b794-0278-4673-b2d2-56d6d4ce330f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.766100] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.766448] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43b43ede-9402-41c7-ae7f-33447a00a42d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.772962] env[61972]: DEBUG oslo_vmware.api [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 940.772962] env[61972]: value = "task-1389450" [ 940.772962] env[61972]: _type = "Task" [ 940.772962] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.781378] env[61972]: DEBUG oslo_vmware.api [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.833670] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389449, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.953390] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a2190bed-a3f3-440c-969b-dd04c27286f6 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.587s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.955063] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.519s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.955063] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.955318] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.955717] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.958902] env[61972]: INFO nova.compute.manager [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Terminating instance [ 941.129117] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389447, 'name': CloneVM_Task, 'duration_secs': 1.380941} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.129748] env[61972]: INFO nova.virt.vmwareapi.vmops [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Created linked-clone VM from snapshot [ 941.130658] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d29a85-bea5-4ba0-bc8a-18cc6941ed6b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.139722] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Uploading image f88c88ab-7619-4b96-a6f8-405ee8e471b5 {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 941.229162] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.284690] env[61972]: DEBUG oslo_vmware.api [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389450, 'name': PowerOffVM_Task, 'duration_secs': 0.266519} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.285089] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.285487] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.285779] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f816567-72c9-4583-bfd6-9b1f04cf6bfa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.332058] env[61972]: DEBUG oslo_vmware.api [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389449, 'name': PowerOnVM_Task, 'duration_secs': 0.680072} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.332384] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.332566] env[61972]: INFO nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Took 8.63 seconds to spawn the instance on the hypervisor. [ 941.332746] env[61972]: DEBUG nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 941.333699] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a34972d-2604-40ff-a0e8-dd68224a71e6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.468141] env[61972]: DEBUG nova.compute.manager [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 941.468141] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.468141] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75eaf200-1e58-4aa2-ae8a-1505a2f3cba9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.478974] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356a5cd3-a5c8-45a9-97a3-89fc915d606c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.518962] env[61972]: WARNING nova.virt.vmwareapi.vmops [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c274f675-f45e-49e7-8bf3-582a6977d95c could not be found. 
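The CloneVM_Task, PowerOffVM_Task and wait_for_task entries above all follow the same oslo.vmware pattern: invoke a vSphere API method that returns a Task managed object, then poll that task until it succeeds or raises. Below is a minimal sketch of that pattern, assuming a reachable vCenter; the hostname, credentials and the 'vm-294893' moref are placeholders echoing values in the log, not a reproduction of Nova's actual driver code.

# Sketch only: poll a vSphere task the way the log's wait_for_task lines do.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    host='vc.example.test',        # placeholder vCenter endpoint
    server_username='user',        # placeholder credentials
    server_password='secret',
    api_retry_count=3,
    task_poll_interval=0.5)

# Build a managed object reference; 'vm-294893' mirrors the value seen above.
vm_ref = vim_util.get_moref('vm-294893', 'VirtualMachine')

# Invoke the API call; vSphere returns a Task moref immediately.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# Block until the task reports success (raises on error); this polling is what
# produces the "Task: {...} progress is N%" entries in the log.
session.wait_for_task(task)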
[ 941.519563] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.519970] env[61972]: INFO nova.compute.manager [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 941.520399] env[61972]: DEBUG oslo.service.loopingcall [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.524629] env[61972]: DEBUG nova.compute.manager [-] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 941.527030] env[61972]: DEBUG nova.network.neutron [-] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.550668] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.550668] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.550668] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Deleting the datastore file [datastore2] e2b6dd4e-b639-4553-a45f-87c155506ea3 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.550668] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cddb3361-61fe-4c30-9937-dcf1a2ca4aff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.564882] env[61972]: DEBUG oslo_vmware.api [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for the task: (returnval){ [ 941.564882] env[61972]: value = "task-1389452" [ 941.564882] env[61972]: _type = "Task" [ 941.564882] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.578892] env[61972]: DEBUG oslo_vmware.api [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.681551] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d757064-3664-492a-b614-bc578f34a9c9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.693964] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12956c4-a473-44a3-961b-e7e0bb8a4186 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.730152] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f8bb6c-a155-4128-8909-30192493dd36 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.738264] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb66ca96-2ea8-4800-8d7e-27f9c9802609 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.757565] env[61972]: DEBUG nova.compute.provider_tree [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.854820] env[61972]: INFO nova.compute.manager [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Took 22.13 seconds to build instance. [ 942.077962] env[61972]: DEBUG oslo_vmware.api [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Task: {'id': task-1389452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185223} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.078437] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.078756] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.079064] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.079368] env[61972]: INFO nova.compute.manager [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Took 1.32 seconds to destroy the instance on the hypervisor. [ 942.079815] env[61972]: DEBUG oslo.service.loopingcall [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.081444] env[61972]: DEBUG nova.compute.manager [-] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 942.081750] env[61972]: DEBUG nova.network.neutron [-] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.261132] env[61972]: DEBUG nova.scheduler.client.report [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 942.326475] env[61972]: DEBUG nova.network.neutron [-] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.356207] env[61972]: DEBUG oslo_concurrency.lockutils [None req-203681e3-e34b-4969-b667-828e381a8803 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65c02563-a348-4415-bb21-3d3711202838" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.639s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.588483] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "65c02563-a348-4415-bb21-3d3711202838" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.589023] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65c02563-a348-4415-bb21-3d3711202838" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.590262] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "65c02563-a348-4415-bb21-3d3711202838-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.590674] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock 
"65c02563-a348-4415-bb21-3d3711202838-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.591210] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65c02563-a348-4415-bb21-3d3711202838-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.594743] env[61972]: INFO nova.compute.manager [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Terminating instance [ 942.766700] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.326s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.767286] env[61972]: DEBUG nova.compute.manager [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 942.770712] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 7.502s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.770974] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.771200] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 942.771586] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.372s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.773826] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b04d50-c05e-4987-8dac-d82308e9f0ac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.784063] env[61972]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8f09fb-8887-43a3-a9d0-a72499b58628 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.801177] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e03b95-06bd-4190-a2fa-63473e10a8fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.809492] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52fbdba-4c96-4413-98ea-85c666add34b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.844216] env[61972]: INFO nova.compute.manager [-] [instance: c274f675-f45e-49e7-8bf3-582a6977d95c] Took 1.32 seconds to deallocate network for instance. [ 942.844737] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180040MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 942.844899] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.931576] env[61972]: DEBUG nova.network.neutron [-] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.100286] env[61972]: DEBUG nova.compute.manager [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 943.100552] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 943.101592] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401ff906-0c70-447e-9508-39d863751e9e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.110539] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.110853] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0be2d482-be2e-4af0-98f4-9daf06c7fe20 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.121133] env[61972]: DEBUG oslo_vmware.api [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 943.121133] env[61972]: value = "task-1389453" [ 943.121133] env[61972]: _type = "Task" [ 943.121133] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.135767] env[61972]: DEBUG oslo_vmware.api [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389453, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.274687] env[61972]: DEBUG nova.compute.utils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 943.276580] env[61972]: DEBUG nova.compute.manager [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Not allocating networking since 'none' was specified. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 943.436137] env[61972]: INFO nova.compute.manager [-] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Took 1.35 seconds to deallocate network for instance. 
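The recurring "Acquiring lock ... / acquired ... waited / released ... held" entries come from oslo.concurrency's named-lock wrapper (the inner function in lockutils.py referenced throughout). A minimal sketch of that pattern follows, assuming nothing beyond oslo.concurrency itself; the lock names and function are illustrative, not the resource tracker's real code.

# Sketch only: the named-lock pattern behind the lockutils log lines.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs while holding the named lock; the decorator's wrapper is what logs
    # how long each caller waited for the lock and then held it.
    pass

update_usage()

# A context-manager form also exists for ad-hoc critical sections, e.g. the
# per-instance "refresh_cache-..." locks seen above (placeholder name here).
with lockutils.lock('refresh_cache-example-instance-uuid'):
    pass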
[ 943.465664] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f3e8d3-6c58-4276-b75f-8eced6a635db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.474725] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfd7ad2-b81f-4d9c-8aeb-2952aac4e12d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.506967] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269b07a5-460f-4f19-903d-43dd998af06c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.516145] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2e192d-1f6e-4232-9e01-3fa1b44959c0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.533068] env[61972]: DEBUG nova.compute.provider_tree [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.634164] env[61972]: DEBUG oslo_vmware.api [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389453, 'name': PowerOffVM_Task, 'duration_secs': 0.293457} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.634497] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.634673] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 943.634946] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7ba6f5b-25ac-4848-bb03-4c1ae709ebba {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.709692] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 943.709924] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 943.710129] 
env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleting the datastore file [datastore2] 65c02563-a348-4415-bb21-3d3711202838 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 943.710413] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-616235c6-6f7b-4f79-b2ba-99af133042b9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.717216] env[61972]: DEBUG oslo_vmware.api [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 943.717216] env[61972]: value = "task-1389455" [ 943.717216] env[61972]: _type = "Task" [ 943.717216] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.725903] env[61972]: DEBUG oslo_vmware.api [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389455, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.778081] env[61972]: DEBUG nova.compute.manager [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 943.875396] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f2607f36-bddd-4cb2-874d-45cbaf20d134 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "c274f675-f45e-49e7-8bf3-582a6977d95c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.921s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.945029] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.018979] env[61972]: DEBUG nova.compute.manager [req-3f3f91a9-159a-40fa-b2d8-591de3f68048 req-9284121a-491c-4c34-9f7e-30c0a45e1208 service nova] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Received event network-vif-deleted-515acb28-dc72-4bb7-804b-4fb5de167f99 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 944.037464] env[61972]: DEBUG nova.scheduler.client.report [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 944.227701] env[61972]: DEBUG oslo_vmware.api [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389455, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185455} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.227982] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.228207] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 944.228396] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.228570] env[61972]: INFO nova.compute.manager [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65c02563-a348-4415-bb21-3d3711202838] Took 1.13 seconds to destroy the instance on the hypervisor. [ 944.228816] env[61972]: DEBUG oslo.service.loopingcall [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.229027] env[61972]: DEBUG nova.compute.manager [-] [instance: 65c02563-a348-4415-bb21-3d3711202838] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 944.229126] env[61972]: DEBUG nova.network.neutron [-] [instance: 65c02563-a348-4415-bb21-3d3711202838] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 944.788219] env[61972]: DEBUG nova.compute.manager [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 944.992849] env[61972]: DEBUG nova.network.neutron [-] [instance: 65c02563-a348-4415-bb21-3d3711202838] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.049584] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.277s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.052562] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.823s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.054396] env[61972]: INFO nova.compute.claims [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.495906] env[61972]: INFO nova.compute.manager [-] [instance: 65c02563-a348-4415-bb21-3d3711202838] Took 1.27 seconds to deallocate network for instance. [ 945.546358] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Destroying the VM {{(pid=61972) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 945.546677] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-59febbf7-9eba-4420-b4f5-ad6774a9a4ed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.555093] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 945.555093] env[61972]: value = "task-1389456" [ 945.555093] env[61972]: _type = "Task" [ 945.555093] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.573682] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389456, 'name': Destroy_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.584652] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 945.584917] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 945.585158] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.585296] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 945.585443] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.585619] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 945.585865] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 945.586043] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 945.586218] env[61972]: DEBUG nova.virt.hardware [None 
req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 945.586396] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 945.586602] env[61972]: DEBUG nova.virt.hardware [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 945.589077] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3449877b-9a07-4c70-b947-b4a389e8c94f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.600957] env[61972]: DEBUG oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ce3db4-6bc6-4c22-a5cc-d504c04de3d0/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 945.602303] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a31b1a-9397-4cbe-87d3-d36a5d5363bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.607207] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc163c0-5037-412c-8158-862b7c83a1d5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.613517] env[61972]: DEBUG oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ce3db4-6bc6-4c22-a5cc-d504c04de3d0/disk-0.vmdk is in state: ready. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 945.613752] env[61972]: ERROR oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ce3db4-6bc6-4c22-a5cc-d504c04de3d0/disk-0.vmdk due to incomplete transfer. 
[ 945.621892] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b7d277b8-6516-4c87-bdcb-0fe684f7d915 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.624338] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 945.630166] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Creating folder: Project (2871177da8c0496a8b2821bc041c283b). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 945.630532] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9475b7f8-65d1-4182-a4e4-b16ce58af37a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.636914] env[61972]: INFO nova.scheduler.client.report [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted allocation for migration 82a52ded-77e8-48e1-ae67-66fa8c324ccb [ 945.645665] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Created folder: Project (2871177da8c0496a8b2821bc041c283b) in parent group-v294799. [ 945.645873] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Creating folder: Instances. Parent ref: group-v294898. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 945.646162] env[61972]: DEBUG oslo_vmware.rw_handles [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ce3db4-6bc6-4c22-a5cc-d504c04de3d0/disk-0.vmdk. 
{{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 945.646347] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Uploaded image f2b2602a-d38e-4ffb-b305-ed7666354ac0 to the Glance image server {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 945.648643] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Destroying the VM {{(pid=61972) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 945.648916] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22170193-e94c-4462-a099-e0e7a44c8ffc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.650669] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0dad8a4e-244a-4f3f-8265-3bdced3f1d61 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.659043] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 945.659043] env[61972]: value = "task-1389459" [ 945.659043] env[61972]: _type = "Task" [ 945.659043] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.662873] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Created folder: Instances in parent group-v294898. [ 945.663157] env[61972]: DEBUG oslo.service.loopingcall [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.663894] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 945.664279] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57adfa5e-6e7c-4d06-b07f-6b1aae41f1b9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.684121] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389459, 'name': Destroy_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.690258] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 945.690258] env[61972]: value = "task-1389460" [ 945.690258] env[61972]: _type = "Task" [ 945.690258] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.698339] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389460, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.003844] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.049469] env[61972]: DEBUG nova.compute.manager [req-18a8d866-9ffe-44bf-b2a9-d6fa9bac9075 req-8f405afd-3bf9-4636-9452-1e83ccb900f2 service nova] [instance: 65c02563-a348-4415-bb21-3d3711202838] Received event network-vif-deleted-afa659ca-956e-457f-8091-aa362b2ef1e3 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 946.064774] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389456, 'name': Destroy_Task, 'duration_secs': 0.44415} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.065019] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Destroyed the VM [ 946.065295] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Deleting Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 946.065567] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-22725f27-5bdf-458c-b6c7-95e043811555 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.074187] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 946.074187] env[61972]: value = "task-1389461" [ 946.074187] env[61972]: _type = "Task" [ 946.074187] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.082819] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389461, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.148229] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1761fee3-5a84-4884-82ba-f6518113904f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.367s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.171949] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389459, 'name': Destroy_Task, 'duration_secs': 0.432947} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.172259] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Destroyed the VM [ 946.172502] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Deleting Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 946.172767] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-db9da238-c929-4f2b-8593-c276e67bfd46 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.180343] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 946.180343] env[61972]: value = "task-1389462" [ 946.180343] env[61972]: _type = "Task" [ 946.180343] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.197593] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389462, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.205156] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389460, 'name': CreateVM_Task, 'duration_secs': 0.30899} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.207818] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 946.208914] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.208914] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.209193] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 946.209476] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcf932c2-8e58-40bb-904f-7b4fd49a17a8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.214980] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 946.214980] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5293b410-19bf-2b22-7783-6c767fc3fa00" [ 946.214980] env[61972]: _type = "Task" [ 946.214980] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.226904] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5293b410-19bf-2b22-7783-6c767fc3fa00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.232107] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e090a22-5d4c-40dd-bbbc-7c7fbfa24639 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.239815] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439989c7-57d0-4068-ba03-28330b1f4c00 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.272899] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d009150f-fe47-4ef7-9e0a-72c4ae93acfc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.281479] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc9b7e5-b735-4fc4-9216-f64d0c5167d6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.296314] env[61972]: DEBUG nova.compute.provider_tree [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.572213] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.572578] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.585618] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389461, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.689304] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389462, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.726895] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5293b410-19bf-2b22-7783-6c767fc3fa00, 'name': SearchDatastore_Task, 'duration_secs': 0.011655} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.727141] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.727380] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 946.727619] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.727794] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.728050] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 946.728334] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16e35307-20a3-48d6-8af2-25da3d98c057 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.737868] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 946.738091] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 946.739030] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7be8e58-a59c-44c8-9f8a-fcc3bdf5da56 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.746930] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 946.746930] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c79224-9948-300a-4e3a-8c6107756f62" [ 946.746930] env[61972]: _type = "Task" [ 946.746930] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.754621] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c79224-9948-300a-4e3a-8c6107756f62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.799837] env[61972]: DEBUG nova.scheduler.client.report [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 947.074827] env[61972]: DEBUG nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 947.088459] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389461, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.191284] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389462, 'name': RemoveSnapshot_Task, 'duration_secs': 0.622585} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.192108] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Deleted Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 947.192108] env[61972]: DEBUG nova.compute.manager [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 947.192949] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430d6165-c37b-43e8-a3e8-bce331532602 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.259031] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c79224-9948-300a-4e3a-8c6107756f62, 'name': SearchDatastore_Task, 'duration_secs': 0.010995} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.259799] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e273d70-4d23-4d55-b00d-d92729e6e3c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.265208] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 947.265208] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52aeaf23-5a05-68dc-7ef3-6b2fd098410b" [ 947.265208] env[61972]: _type = "Task" [ 947.265208] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.273531] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52aeaf23-5a05-68dc-7ef3-6b2fd098410b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.308141] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.256s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.308914] env[61972]: DEBUG nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 947.311797] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "84e07f61-2111-43cb-93a2-9cb47ac52503" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.312051] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.312331] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.312533] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.312709] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.314479] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.470s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.316395] env[61972]: INFO nova.compute.manager [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Terminating instance [ 947.591558] env[61972]: DEBUG oslo_vmware.api [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389461, 'name': RemoveSnapshot_Task, 'duration_secs': 1.094212} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.591898] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Deleted Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 947.603135] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.705525] env[61972]: INFO nova.compute.manager [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Shelve offloading [ 947.750636] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.750636] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.778635] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52aeaf23-5a05-68dc-7ef3-6b2fd098410b, 'name': SearchDatastore_Task, 'duration_secs': 0.009623} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.779218] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.779496] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] e8582450-36c2-4d6b-89ee-6fef324063c4/e8582450-36c2-4d6b-89ee-6fef324063c4.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 947.779766] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28b54e1c-232b-4137-a2b0-35e19450ba81 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.786356] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 947.786356] env[61972]: value = "task-1389463" [ 947.786356] env[61972]: _type = "Task" [ 947.786356] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.794297] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389463, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.816481] env[61972]: DEBUG nova.compute.utils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 947.818405] env[61972]: DEBUG nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 947.818697] env[61972]: DEBUG nova.network.neutron [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 947.826142] env[61972]: DEBUG nova.compute.manager [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 947.826380] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.827419] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd66fa8c-d892-4cd2-8f23-6d87b05a1f9f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.836315] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.836558] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18dee894-a78a-4244-9e45-5de0190e6a5f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.842879] env[61972]: DEBUG oslo_vmware.api [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 947.842879] env[61972]: value = "task-1389464" [ 947.842879] env[61972]: _type = "Task" [ 947.842879] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.852540] env[61972]: DEBUG oslo_vmware.api [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389464, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.886307] env[61972]: DEBUG nova.policy [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47ebbe5ddb8b41bbb1a54cf191aef61a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '651d8f34661542219f5451bce866ec02', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 948.097220] env[61972]: WARNING nova.compute.manager [None req-82b5e122-eac8-417f-8aae-dc56d15a7dfe tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Image not found during snapshot: nova.exception.ImageNotFound: Image f88c88ab-7619-4b96-a6f8-405ee8e471b5 could not be found. 
[ 948.209590] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.210642] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dbe41d9-ba31-41da-bd2f-32c5cc129f8c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.219846] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 948.219846] env[61972]: value = "task-1389465" [ 948.219846] env[61972]: _type = "Task" [ 948.219846] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.236179] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] VM already powered off {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 948.236179] env[61972]: DEBUG nova.compute.manager [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 948.236179] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67df783-1153-41bf-95d7-523a42fa4dec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.246129] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.246364] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.246589] env[61972]: DEBUG nova.network.neutron [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.249973] env[61972]: DEBUG nova.network.neutron [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Successfully created port: fdefc4b7-2c39-496c-9909-b5e05cbdc1da 
{{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.252455] env[61972]: DEBUG nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 948.297770] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389463, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.326525] env[61972]: DEBUG nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 948.352718] env[61972]: DEBUG oslo_vmware.api [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389464, 'name': PowerOffVM_Task, 'duration_secs': 0.298294} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.354026] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.354026] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.354193] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e355349-691f-4aa5-9a8c-bb433ba658c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.359738] env[61972]: WARNING nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance e2b6dd4e-b639-4553-a45f-87c155506ea3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 948.359738] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 9562558a-89ba-4169-bd0a-ad31fc0c33bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.359738] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 72435dc4-eae1-4606-bb32-e7e8e282d0b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.359738] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 56e21cf4-4dbc-4f72-97c0-082dd689c046 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.360053] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 84e07f61-2111-43cb-93a2-9cb47ac52503 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.360053] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance d2864436-05a3-421f-98fd-41df925727c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.360053] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance b03b1fe7-2eda-4505-a6f9-19c570b15d1e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.360053] env[61972]: WARNING nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 65c02563-a348-4415-bb21-3d3711202838 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 948.360221] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance e8582450-36c2-4d6b-89ee-6fef324063c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.360221] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance f71d004b-5343-4ef3-8f37-8ff544c335a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.440274] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.440518] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.440575] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleting the datastore file [datastore2] 84e07f61-2111-43cb-93a2-9cb47ac52503 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.441216] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-569ece1c-fe81-4385-85c7-73db10e499a7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.447700] env[61972]: DEBUG oslo_vmware.api [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 948.447700] env[61972]: value = "task-1389467" [ 948.447700] env[61972]: _type = "Task" [ 948.447700] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.456607] env[61972]: DEBUG oslo_vmware.api [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.577575] env[61972]: DEBUG oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521759a0-e12a-c94f-ac09-418c21e5615d/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 948.578501] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef711300-52e7-45d8-8a6e-c51275d1377b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.584907] env[61972]: DEBUG oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521759a0-e12a-c94f-ac09-418c21e5615d/disk-0.vmdk is in state: ready. 
{{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 948.585103] env[61972]: ERROR oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521759a0-e12a-c94f-ac09-418c21e5615d/disk-0.vmdk due to incomplete transfer. [ 948.585325] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-de05d736-9def-4383-a0fc-0fe88dd3484a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.591662] env[61972]: DEBUG oslo_vmware.rw_handles [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521759a0-e12a-c94f-ac09-418c21e5615d/disk-0.vmdk. {{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 948.591858] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Uploaded image 5d728988-1ea5-4476-a502-ce99a53302c1 to the Glance image server {{(pid=61972) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 948.593532] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Destroying the VM {{(pid=61972) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 948.593761] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-27e812ba-7a13-4f4b-8212-84e8537e7ecc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.599812] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 948.599812] env[61972]: value = "task-1389468" [ 948.599812] env[61972]: _type = "Task" [ 948.599812] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.607634] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389468, 'name': Destroy_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.782478] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.798141] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389463, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526043} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.798617] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] e8582450-36c2-4d6b-89ee-6fef324063c4/e8582450-36c2-4d6b-89ee-6fef324063c4.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 948.798727] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 948.798896] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b477a9d-ba65-43a0-a8ae-21850057b177 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.806751] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 948.806751] env[61972]: value = "task-1389469" [ 948.806751] env[61972]: _type = "Task" [ 948.806751] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.813963] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389469, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.862314] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.957899] env[61972]: DEBUG oslo_vmware.api [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287932} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.958181] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.958369] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.958551] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.958734] env[61972]: INFO nova.compute.manager [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Took 1.13 seconds to destroy the instance on the hypervisor. [ 948.958967] env[61972]: DEBUG oslo.service.loopingcall [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 948.959211] env[61972]: DEBUG nova.compute.manager [-] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 948.959296] env[61972]: DEBUG nova.network.neutron [-] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.110467] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389468, 'name': Destroy_Task, 'duration_secs': 0.327174} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.110752] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Destroyed the VM [ 949.111037] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Deleting Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 949.111306] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-eced8748-e4e8-4d3f-adb3-39b5df41c89a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.118138] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 949.118138] env[61972]: value = "task-1389470" [ 949.118138] env[61972]: _type = "Task" [ 949.118138] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.127020] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389470, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.201484] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.201647] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.202013] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.202013] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.202211] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.204473] env[61972]: INFO nova.compute.manager [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Terminating instance [ 949.290413] env[61972]: DEBUG nova.network.neutron [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [{"id": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "address": "fa:16:3e:68:43:9d", "network": {"id": "8bff1a25-9939-4436-a9bb-c54446b85c9e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-555487388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9266fa0d01664ba4a80ff4068cb9b9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e228e1-2a", "ovs_interfaceid": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.315434] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389469, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078286} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.315715] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 949.317239] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e131610-c963-4f85-a7af-076cb8e41e3b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.342841] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] e8582450-36c2-4d6b-89ee-6fef324063c4/e8582450-36c2-4d6b-89ee-6fef324063c4.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.345529] env[61972]: DEBUG nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 949.347459] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e27b97bc-5f30-43e6-ba76-baccb771871c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.363608] env[61972]: DEBUG nova.compute.manager [req-61145ad3-9844-4867-9a39-2c67e57e1e14 req-1c9e0188-479e-4bec-b5a1-a49456e24e9f service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Received event network-vif-deleted-dd99d79a-cd6c-477b-88f4-45e9d019f331 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 949.363789] env[61972]: INFO nova.compute.manager [req-61145ad3-9844-4867-9a39-2c67e57e1e14 req-1c9e0188-479e-4bec-b5a1-a49456e24e9f service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Neutron deleted interface dd99d79a-cd6c-477b-88f4-45e9d019f331; detaching it from the instance and deleting it from the info cache [ 949.363962] env[61972]: DEBUG nova.network.neutron [req-61145ad3-9844-4867-9a39-2c67e57e1e14 req-1c9e0188-479e-4bec-b5a1-a49456e24e9f service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.368643] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 8745c578-de46-4ade-bf08-f0bc9bb300d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 949.368643] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 949.368643] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 949.374407] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 949.374407] env[61972]: value = "task-1389471" [ 949.374407] env[61972]: _type = "Task" [ 949.374407] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.388444] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389471, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.393523] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 949.393766] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 949.393927] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.394129] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Flavor pref 0:0:0 {{(pid=61972) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 949.394284] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.394435] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 949.394644] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 949.394807] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 949.394979] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 949.395188] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 949.395404] env[61972]: DEBUG nova.virt.hardware [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.396612] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66714bb8-309f-46f6-b118-6c325e01f51a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.408989] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8848777-8b0e-427d-a32b-147b89a4065b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.620620] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19b8730-fb97-41d6-85c1-808a0ada937c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.633751] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': 
task-1389470, 'name': RemoveSnapshot_Task, 'duration_secs': 0.428517} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.635815] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Deleted Snapshot of the VM instance {{(pid=61972) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 949.636535] env[61972]: DEBUG nova.compute.manager [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 949.637943] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850ce31d-356e-4338-bf9f-54e177c16fe9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.640279] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0d925e-5fdc-47d8-803e-ad130667803e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.674581] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92870dbe-f529-47b1-8463-217c986ee4d6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.682980] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233728c7-47d6-4f05-b3ed-24ae098ee622 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.696176] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.708083] env[61972]: DEBUG nova.compute.manager [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 949.708318] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.709448] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4a5049-e7e5-4b42-b980-a79cf1c818be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.716585] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.716911] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c75aee17-c730-4c2a-bf99-f06735c0b630 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.722534] env[61972]: DEBUG oslo_vmware.api [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 949.722534] env[61972]: value = "task-1389472" [ 949.722534] env[61972]: _type = "Task" [ 949.722534] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.732234] env[61972]: DEBUG oslo_vmware.api [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389472, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.757116] env[61972]: DEBUG nova.network.neutron [-] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.793107] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.811577] env[61972]: DEBUG nova.network.neutron [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Successfully updated port: fdefc4b7-2c39-496c-9909-b5e05cbdc1da {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.868297] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9938b6c-77f6-43e7-90b1-360816251aef {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.880810] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b444a71-c761-4a6c-b34c-776f25b8c792 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.896352] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389471, 'name': ReconfigVM_Task, 'duration_secs': 0.321229} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.898767] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Reconfigured VM instance instance-0000005a to attach disk [datastore2] e8582450-36c2-4d6b-89ee-6fef324063c4/e8582450-36c2-4d6b-89ee-6fef324063c4.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.906734] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25571537-c693-4e17-93df-498973a41f9e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.908680] env[61972]: DEBUG nova.compute.manager [req-61145ad3-9844-4867-9a39-2c67e57e1e14 req-1c9e0188-479e-4bec-b5a1-a49456e24e9f service nova] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Detach interface failed, port_id=dd99d79a-cd6c-477b-88f4-45e9d019f331, reason: Instance 84e07f61-2111-43cb-93a2-9cb47ac52503 could not be found. 
{{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 949.913177] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 949.913177] env[61972]: value = "task-1389473" [ 949.913177] env[61972]: _type = "Task" [ 949.913177] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.920770] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389473, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.119086] env[61972]: DEBUG nova.compute.manager [req-0f577c8a-c859-45b9-a6a8-6b2c1ee12fd6 req-3af72a19-f6e5-4651-8c6b-36e4810d89bd service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received event network-vif-unplugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 950.119332] env[61972]: DEBUG oslo_concurrency.lockutils [req-0f577c8a-c859-45b9-a6a8-6b2c1ee12fd6 req-3af72a19-f6e5-4651-8c6b-36e4810d89bd service nova] Acquiring lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.119459] env[61972]: DEBUG oslo_concurrency.lockutils [req-0f577c8a-c859-45b9-a6a8-6b2c1ee12fd6 req-3af72a19-f6e5-4651-8c6b-36e4810d89bd service nova] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.119629] env[61972]: DEBUG oslo_concurrency.lockutils [req-0f577c8a-c859-45b9-a6a8-6b2c1ee12fd6 req-3af72a19-f6e5-4651-8c6b-36e4810d89bd service nova] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.119751] env[61972]: DEBUG nova.compute.manager [req-0f577c8a-c859-45b9-a6a8-6b2c1ee12fd6 req-3af72a19-f6e5-4651-8c6b-36e4810d89bd service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] No waiting events found dispatching network-vif-unplugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 950.119924] env[61972]: WARNING nova.compute.manager [req-0f577c8a-c859-45b9-a6a8-6b2c1ee12fd6 req-3af72a19-f6e5-4651-8c6b-36e4810d89bd service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received unexpected event network-vif-unplugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b for instance with vm_state shelved and task_state shelving_offloading. 
[ 950.152781] env[61972]: INFO nova.compute.manager [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Shelve offloading [ 950.199086] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 950.218625] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.219520] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7ffb04-e798-4601-b480-1307f012e9fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.227631] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.228177] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2444a208-bf00-4ffa-a421-36ee47af3bce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.232514] env[61972]: DEBUG oslo_vmware.api [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389472, 'name': PowerOffVM_Task, 'duration_secs': 0.181115} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.232746] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.232916] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.233172] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb02e101-e8cc-4bb1-bcd5-417f3482a489 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.259782] env[61972]: INFO nova.compute.manager [-] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Took 1.30 seconds to deallocate network for instance. [ 950.313910] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.314081] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.314238] env[61972]: DEBUG nova.network.neutron [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.315943] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.315943] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.315943] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleting the datastore file [datastore2] 56e21cf4-4dbc-4f72-97c0-082dd689c046 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.316294] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffa1cd47-d15c-4d79-9b5d-9fcc4a1d6b0c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.322488] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 950.322488] env[61972]: value = "task-1389476" [ 950.322488] env[61972]: _type = "Task" [ 950.322488] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.331806] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389476, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.424326] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389473, 'name': Rename_Task, 'duration_secs': 0.157362} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.424605] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 950.424858] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f6758f8-4049-435d-8b66-c68ed956ee9c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.431529] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 950.431529] env[61972]: value = "task-1389477" [ 950.431529] env[61972]: _type = "Task" [ 950.431529] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.440686] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389477, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.661565] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.662092] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-271f27c2-4ff4-4ba4-bdd1-a7c80eceaf52 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.669740] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 950.669740] env[61972]: value = "task-1389478" [ 950.669740] env[61972]: _type = "Task" [ 950.669740] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.681038] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] VM already powered off {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 950.681356] env[61972]: DEBUG nova.compute.manager [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 950.682432] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac04cfc-0409-4d98-9724-d550c7032864 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.689446] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.689701] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.689955] env[61972]: DEBUG nova.network.neutron [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.696106] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] 
Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.696405] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.696669] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleting the datastore file [datastore1] b03b1fe7-2eda-4505-a6f9-19c570b15d1e {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.696995] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84bb63e2-d193-4fd4-a30a-99334f9cc845 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.704318] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 950.704581] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.390s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.705297] env[61972]: DEBUG oslo_vmware.api [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for the task: (returnval){ [ 950.705297] env[61972]: value = "task-1389479" [ 950.705297] env[61972]: _type = "Task" [ 950.705297] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.705606] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.761s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.705877] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.708463] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.705s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.708749] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.711037] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.108s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.713132] env[61972]: INFO nova.compute.claims [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.728942] env[61972]: DEBUG oslo_vmware.api [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.738911] env[61972]: INFO nova.scheduler.client.report [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted allocations for instance 65c02563-a348-4415-bb21-3d3711202838 [ 950.744051] env[61972]: INFO nova.scheduler.client.report [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Deleted allocations for instance e2b6dd4e-b639-4553-a45f-87c155506ea3 [ 950.766353] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.834043] env[61972]: DEBUG oslo_vmware.api [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13074} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.834347] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.834553] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.834757] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.847517] env[61972]: DEBUG nova.network.neutron [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 950.851180] env[61972]: INFO nova.scheduler.client.report [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleted allocations for instance 56e21cf4-4dbc-4f72-97c0-082dd689c046 [ 950.941218] env[61972]: DEBUG oslo_vmware.api [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389477, 'name': PowerOnVM_Task, 'duration_secs': 0.41454} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.941502] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.941710] env[61972]: INFO nova.compute.manager [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Took 6.15 seconds to spawn the instance on the hypervisor. [ 950.941898] env[61972]: DEBUG nova.compute.manager [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 950.942694] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6b4bbd-f42b-4fd3-aff3-8caaa31cac84 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.022243] env[61972]: DEBUG nova.network.neutron [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Updating instance_info_cache with network_info: [{"id": "fdefc4b7-2c39-496c-9909-b5e05cbdc1da", "address": "fa:16:3e:d6:59:03", "network": {"id": "c74365c9-d7d6-401a-a7fb-98f833ef744e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-480970775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651d8f34661542219f5451bce866ec02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdefc4b7-2c", "ovs_interfaceid": "fdefc4b7-2c39-496c-9909-b5e05cbdc1da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.229435] env[61972]: DEBUG oslo_vmware.api [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Task: {'id': task-1389479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167481} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.230145] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.230196] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.230339] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.230515] env[61972]: INFO nova.compute.manager [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Took 1.52 seconds to destroy the instance on the hypervisor. [ 951.230860] env[61972]: DEBUG oslo.service.loopingcall [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.231144] env[61972]: DEBUG nova.compute.manager [-] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 951.231258] env[61972]: DEBUG nova.network.neutron [-] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 951.249171] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a592620f-0e71-454a-8d86-14b04c8909a4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65c02563-a348-4415-bb21-3d3711202838" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.660s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.252862] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5bcc80c2-d3bb-482a-96d5-c4b2b4fb4168 tempest-ListImageFiltersTestJSON-615186200 tempest-ListImageFiltersTestJSON-615186200-project-member] Lock "e2b6dd4e-b639-4553-a45f-87c155506ea3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.012s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.355672] env[61972]: DEBUG nova.compute.manager [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Received event network-vif-plugged-fdefc4b7-2c39-496c-9909-b5e05cbdc1da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 951.355855] env[61972]: 
DEBUG oslo_concurrency.lockutils [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] Acquiring lock "f71d004b-5343-4ef3-8f37-8ff544c335a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.356088] env[61972]: DEBUG oslo_concurrency.lockutils [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.356263] env[61972]: DEBUG oslo_concurrency.lockutils [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.356512] env[61972]: DEBUG nova.compute.manager [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] No waiting events found dispatching network-vif-plugged-fdefc4b7-2c39-496c-9909-b5e05cbdc1da {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 951.356804] env[61972]: WARNING nova.compute.manager [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Received unexpected event network-vif-plugged-fdefc4b7-2c39-496c-9909-b5e05cbdc1da for instance with vm_state building and task_state spawning. [ 951.356859] env[61972]: DEBUG nova.compute.manager [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Received event network-changed-fdefc4b7-2c39-496c-9909-b5e05cbdc1da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 951.357120] env[61972]: DEBUG nova.compute.manager [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Refreshing instance network info cache due to event network-changed-fdefc4b7-2c39-496c-9909-b5e05cbdc1da. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 951.357284] env[61972]: DEBUG oslo_concurrency.lockutils [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] Acquiring lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.358680] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.404708] env[61972]: DEBUG nova.network.neutron [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [{"id": "96b44391-970b-458b-bb63-47288e6d18a2", "address": "fa:16:3e:01:b3:84", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96b44391-97", "ovs_interfaceid": "96b44391-970b-458b-bb63-47288e6d18a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.460991] env[61972]: INFO nova.compute.manager [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Took 17.08 seconds to build instance. 
[ 951.525867] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.525867] env[61972]: DEBUG nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Instance network_info: |[{"id": "fdefc4b7-2c39-496c-9909-b5e05cbdc1da", "address": "fa:16:3e:d6:59:03", "network": {"id": "c74365c9-d7d6-401a-a7fb-98f833ef744e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-480970775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651d8f34661542219f5451bce866ec02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdefc4b7-2c", "ovs_interfaceid": "fdefc4b7-2c39-496c-9909-b5e05cbdc1da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 951.526077] env[61972]: DEBUG oslo_concurrency.lockutils [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] Acquired lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.526077] env[61972]: DEBUG nova.network.neutron [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Refreshing network info cache for port fdefc4b7-2c39-496c-9909-b5e05cbdc1da {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.526968] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:59:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5fe645c-e088-401e-ab53-4ae2981dea72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fdefc4b7-2c39-496c-9909-b5e05cbdc1da', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 951.534860] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Creating folder: Project (651d8f34661542219f5451bce866ec02). 
Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 951.538030] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e472ea28-f9a0-4eb5-badd-ea69bb156f8b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.548627] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Created folder: Project (651d8f34661542219f5451bce866ec02) in parent group-v294799. [ 951.548715] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Creating folder: Instances. Parent ref: group-v294901. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 951.548923] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e46c1ca-4a9d-479f-9f4a-756055fdc362 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.557201] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Created folder: Instances in parent group-v294901. [ 951.557766] env[61972]: DEBUG oslo.service.loopingcall [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.557966] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 951.558184] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a342f763-d62d-43cd-8615-9642548f7c8d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.578133] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 951.578133] env[61972]: value = "task-1389482" [ 951.578133] env[61972]: _type = "Task" [ 951.578133] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.586360] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389482, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.746425] env[61972]: DEBUG nova.network.neutron [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Updated VIF entry in instance network info cache for port fdefc4b7-2c39-496c-9909-b5e05cbdc1da. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 951.747860] env[61972]: DEBUG nova.network.neutron [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Updating instance_info_cache with network_info: [{"id": "fdefc4b7-2c39-496c-9909-b5e05cbdc1da", "address": "fa:16:3e:d6:59:03", "network": {"id": "c74365c9-d7d6-401a-a7fb-98f833ef744e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-480970775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651d8f34661542219f5451bce866ec02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdefc4b7-2c", "ovs_interfaceid": "fdefc4b7-2c39-496c-9909-b5e05cbdc1da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.864014] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107df011-9951-45e4-8105-4e060b26814e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.871913] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed0be39-e1ec-4610-8fca-33cc9a310f4c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.906802] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb24aa3-aa75-4356-a6da-00e236ceeeb8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.910185] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.916898] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96682086-b250-4451-8e8b-a0c022362d4f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.932863] env[61972]: DEBUG nova.compute.provider_tree [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.957401] env[61972]: DEBUG nova.compute.manager [None 
req-2221cd6e-f913-484e-8fdc-b3439fe214ce tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 951.957831] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96d32d3-4830-4f8f-998d-6e7327a44878 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.962563] env[61972]: DEBUG oslo_concurrency.lockutils [None req-500f40ff-c752-42dd-ae52-214ea5a6c56d tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "e8582450-36c2-4d6b-89ee-6fef324063c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.596s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.026154] env[61972]: DEBUG nova.network.neutron [-] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.091732] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389482, 'name': CreateVM_Task, 'duration_secs': 0.413516} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.091914] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 952.092653] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.092813] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.093184] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 952.093443] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-963fbeaa-0b2b-423c-b569-9afa05266d09 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.098522] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 952.098522] env[61972]: value = 
"session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b5557b-7022-041c-a612-7031f2f7eb97" [ 952.098522] env[61972]: _type = "Task" [ 952.098522] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.107056] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b5557b-7022-041c-a612-7031f2f7eb97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.129756] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquiring lock "e8582450-36c2-4d6b-89ee-6fef324063c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.130034] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "e8582450-36c2-4d6b-89ee-6fef324063c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.130402] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquiring lock "e8582450-36c2-4d6b-89ee-6fef324063c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.130460] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "e8582450-36c2-4d6b-89ee-6fef324063c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.131032] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "e8582450-36c2-4d6b-89ee-6fef324063c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.133258] env[61972]: INFO nova.compute.manager [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Terminating instance [ 952.177692] env[61972]: DEBUG nova.compute.manager [req-a010a485-b13f-4675-90e3-9c5cb8724c9f req-6cb68839-e4fe-4520-b533-eae297f5ed25 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received event network-changed-89e228e1-2aac-4e05-98ee-5c29dd44f55b 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 952.177905] env[61972]: DEBUG nova.compute.manager [req-a010a485-b13f-4675-90e3-9c5cb8724c9f req-6cb68839-e4fe-4520-b533-eae297f5ed25 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Refreshing instance network info cache due to event network-changed-89e228e1-2aac-4e05-98ee-5c29dd44f55b. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 952.178137] env[61972]: DEBUG oslo_concurrency.lockutils [req-a010a485-b13f-4675-90e3-9c5cb8724c9f req-6cb68839-e4fe-4520-b533-eae297f5ed25 service nova] Acquiring lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.178323] env[61972]: DEBUG oslo_concurrency.lockutils [req-a010a485-b13f-4675-90e3-9c5cb8724c9f req-6cb68839-e4fe-4520-b533-eae297f5ed25 service nova] Acquired lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.178453] env[61972]: DEBUG nova.network.neutron [req-a010a485-b13f-4675-90e3-9c5cb8724c9f req-6cb68839-e4fe-4520-b533-eae297f5ed25 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Refreshing network info cache for port 89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 952.217219] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.218241] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce163199-2ff0-4fa3-b4ef-50919d128760 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.226526] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.226784] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-672fa566-56e8-428a-8db3-4ef958d8d827 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.253489] env[61972]: DEBUG oslo_concurrency.lockutils [req-ae6912af-bfd1-4c10-9507-2912570880da req-5ffdd14d-cd52-4612-a916-38dda2b5ea62 service nova] Releasing lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.289940] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.290199] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe 
tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.290429] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleting the datastore file [datastore1] 9562558a-89ba-4169-bd0a-ad31fc0c33bc {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.290701] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0b6dc7a-2db3-4b78-b993-bfa1104ea8d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.297267] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 952.297267] env[61972]: value = "task-1389484" [ 952.297267] env[61972]: _type = "Task" [ 952.297267] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.304607] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389484, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.435254] env[61972]: DEBUG nova.scheduler.client.report [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 952.471340] env[61972]: INFO nova.compute.manager [None req-2221cd6e-f913-484e-8fdc-b3439fe214ce tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] instance snapshotting [ 952.471340] env[61972]: DEBUG nova.objects.instance [None req-2221cd6e-f913-484e-8fdc-b3439fe214ce tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lazy-loading 'flavor' on Instance uuid e8582450-36c2-4d6b-89ee-6fef324063c4 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.529493] env[61972]: INFO nova.compute.manager [-] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Took 1.30 seconds to deallocate network for instance. 
[ 952.570766] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.570766] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.611291] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b5557b-7022-041c-a612-7031f2f7eb97, 'name': SearchDatastore_Task, 'duration_secs': 0.00952} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.611602] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.611840] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 952.612119] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.612297] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.612503] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.612803] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-c94d69ec-e3fc-4332-933f-58e7aa3164a5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.621129] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.621337] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 952.622063] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d75b5238-6a76-4187-bcbc-4320cec3d4bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.627772] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 952.627772] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]525a5abd-251a-2db0-2211-a78e8ad67b95" [ 952.627772] env[61972]: _type = "Task" [ 952.627772] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.634861] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525a5abd-251a-2db0-2211-a78e8ad67b95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.636709] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquiring lock "refresh_cache-e8582450-36c2-4d6b-89ee-6fef324063c4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.636895] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquired lock "refresh_cache-e8582450-36c2-4d6b-89ee-6fef324063c4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.637089] env[61972]: DEBUG nova.network.neutron [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.807175] env[61972]: DEBUG oslo_vmware.api [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389484, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129024} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.807475] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 952.809209] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 952.809462] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 952.837471] env[61972]: INFO nova.scheduler.client.report [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleted allocations for instance 9562558a-89ba-4169-bd0a-ad31fc0c33bc [ 952.921322] env[61972]: DEBUG nova.network.neutron [req-a010a485-b13f-4675-90e3-9c5cb8724c9f req-6cb68839-e4fe-4520-b533-eae297f5ed25 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updated VIF entry in instance network info cache for port 89e228e1-2aac-4e05-98ee-5c29dd44f55b. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.921581] env[61972]: DEBUG nova.network.neutron [req-a010a485-b13f-4675-90e3-9c5cb8724c9f req-6cb68839-e4fe-4520-b533-eae297f5ed25 service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [{"id": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "address": "fa:16:3e:68:43:9d", "network": {"id": "8bff1a25-9939-4436-a9bb-c54446b85c9e", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-555487388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9266fa0d01664ba4a80ff4068cb9b9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap89e228e1-2a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.940395] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.941052] env[61972]: DEBUG nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 952.943808] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.161s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.945162] env[61972]: INFO nova.compute.claims [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.976618] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdee81b-f375-496a-9121-80414b4ed36a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.994818] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3315f051-f3e5-4024-81de-88b19c73b51c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.038767] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.073550] env[61972]: DEBUG nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 953.137855] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525a5abd-251a-2db0-2211-a78e8ad67b95, 'name': SearchDatastore_Task, 'duration_secs': 0.008455} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.140465] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1621fb73-b5c1-4d0f-88ca-abaaf290f0cf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.146439] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 953.146439] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52184b1e-150a-a063-aaf2-45a2432e0439" [ 953.146439] env[61972]: _type = "Task" [ 953.146439] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.154140] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52184b1e-150a-a063-aaf2-45a2432e0439, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.158566] env[61972]: DEBUG nova.network.neutron [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.214586] env[61972]: DEBUG nova.network.neutron [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.341962] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.411883] env[61972]: DEBUG nova.compute.manager [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Received event network-vif-deleted-36f8ea5c-3a0e-465b-86ad-a380d3b8f573 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 953.412189] env[61972]: DEBUG nova.compute.manager [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received event network-vif-unplugged-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 953.412463] env[61972]: DEBUG oslo_concurrency.lockutils [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] Acquiring lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.412731] env[61972]: DEBUG oslo_concurrency.lockutils [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.412987] env[61972]: DEBUG oslo_concurrency.lockutils [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.413241] env[61972]: DEBUG nova.compute.manager [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] No waiting events found dispatching network-vif-unplugged-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 953.413721] env[61972]: WARNING nova.compute.manager [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received unexpected event network-vif-unplugged-96b44391-970b-458b-bb63-47288e6d18a2 for instance with vm_state shelved_offloaded and task_state None. [ 953.413721] env[61972]: DEBUG nova.compute.manager [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received event network-changed-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 953.413883] env[61972]: DEBUG nova.compute.manager [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Refreshing instance network info cache due to event network-changed-96b44391-970b-458b-bb63-47288e6d18a2. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 953.414167] env[61972]: DEBUG oslo_concurrency.lockutils [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] Acquiring lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.414424] env[61972]: DEBUG oslo_concurrency.lockutils [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] Acquired lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.414595] env[61972]: DEBUG nova.network.neutron [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Refreshing network info cache for port 96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.424611] env[61972]: DEBUG oslo_concurrency.lockutils [req-a010a485-b13f-4675-90e3-9c5cb8724c9f req-6cb68839-e4fe-4520-b533-eae297f5ed25 service nova] Releasing lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.451925] env[61972]: DEBUG nova.compute.utils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 953.456125] env[61972]: DEBUG nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 953.456311] env[61972]: DEBUG nova.network.neutron [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 953.505977] env[61972]: DEBUG nova.compute.manager [None req-2221cd6e-f913-484e-8fdc-b3439fe214ce tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Instance disappeared during snapshot {{(pid=61972) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4580}} [ 953.512601] env[61972]: DEBUG nova.policy [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc3cd61498bc4f858a47a72f02466b3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3c052a272742808be2bcdc71d8f62f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 953.595246] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.647731] env[61972]: DEBUG nova.compute.manager [None req-2221cd6e-f913-484e-8fdc-b3439fe214ce tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Found 0 images (rotation: 2) {{(pid=61972) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 953.659605] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52184b1e-150a-a063-aaf2-45a2432e0439, 'name': SearchDatastore_Task, 'duration_secs': 0.010164} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.659880] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.660203] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] f71d004b-5343-4ef3-8f37-8ff544c335a2/f71d004b-5343-4ef3-8f37-8ff544c335a2.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 953.660466] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c8640af-7615-4099-9e1e-753b5d10d6c6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.667538] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 953.667538] env[61972]: value = "task-1389485" [ 953.667538] env[61972]: _type = "Task" [ 953.667538] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.677758] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.716624] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Releasing lock "refresh_cache-e8582450-36c2-4d6b-89ee-6fef324063c4" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.717236] env[61972]: DEBUG nova.compute.manager [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 953.717535] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 953.718680] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2c1e73-12ec-45c3-81c6-afbc6bba5ba2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.728127] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.728127] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f86e68c-db99-4f7e-bb40-ab44605a86e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.734705] env[61972]: DEBUG oslo_vmware.api [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 953.734705] env[61972]: value = "task-1389486" [ 953.734705] env[61972]: _type = "Task" [ 953.734705] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.746407] env[61972]: DEBUG oslo_vmware.api [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389486, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.770132] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "bf32c8b2-51b4-495a-b340-5dbabdf33137" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.770132] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "bf32c8b2-51b4-495a-b340-5dbabdf33137" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.829620] env[61972]: DEBUG nova.network.neutron [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Successfully created port: eaccfe94-cbc5-4a94-b940-24e8d40565e2 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.956506] env[61972]: DEBUG nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 954.073030] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "f0565271-2276-4f18-813a-6f9338183480" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.073403] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "f0565271-2276-4f18-813a-6f9338183480" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.107379] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.183899] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501349} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.184740] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] f71d004b-5343-4ef3-8f37-8ff544c335a2/f71d004b-5343-4ef3-8f37-8ff544c335a2.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 954.184740] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 954.184922] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3bb2f169-9b20-4d59-81bd-5e1fea5f321d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.191889] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 954.191889] env[61972]: value = "task-1389487" [ 954.191889] env[61972]: _type = "Task" [ 954.191889] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.193482] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e37712e-247e-4ef5-b420-2005a7b37657 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.209058] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862c172b-2060-4398-b3c0-6128b4256bcc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.213844] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.240031] env[61972]: DEBUG nova.network.neutron [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updated VIF entry in instance network info cache for port 96b44391-970b-458b-bb63-47288e6d18a2. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 954.240401] env[61972]: DEBUG nova.network.neutron [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [{"id": "96b44391-970b-458b-bb63-47288e6d18a2", "address": "fa:16:3e:01:b3:84", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap96b44391-97", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.244744] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be93b2d-85af-4040-96bb-48494c2b2d29 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.253889] env[61972]: DEBUG oslo_vmware.api [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389486, 'name': PowerOffVM_Task, 'duration_secs': 0.127587} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.254744] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.254926] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.256261] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05a58c1-febd-4353-aaf5-4610d4750d2c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.259861] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d631f190-da54-4c6b-8e75-37552f050580 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.271336] env[61972]: DEBUG nova.compute.provider_tree [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.275439] env[61972]: DEBUG nova.compute.manager [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 954.286728] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.286928] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.287129] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Deleting the datastore file [datastore2] e8582450-36c2-4d6b-89ee-6fef324063c4 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.287600] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbb223f6-2d0d-4665-b6bf-5d212e008b91 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.293134] env[61972]: DEBUG oslo_vmware.api [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for the task: (returnval){ [ 954.293134] env[61972]: value = "task-1389489" [ 954.293134] env[61972]: _type = "Task" [ 954.293134] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.303152] env[61972]: DEBUG oslo_vmware.api [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389489, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.576319] env[61972]: DEBUG nova.compute.manager [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 954.680542] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.702378] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064418} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.702733] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.703447] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797d6730-c6da-4faa-9a2c-e9f89963c494 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.724594] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] f71d004b-5343-4ef3-8f37-8ff544c335a2/f71d004b-5343-4ef3-8f37-8ff544c335a2.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.724875] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2329941-2e82-4b18-aad3-c1acc1490a3b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.744457] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 954.744457] env[61972]: value = "task-1389490" [ 954.744457] env[61972]: _type = "Task" [ 954.744457] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.749242] env[61972]: DEBUG oslo_concurrency.lockutils [req-1d43acb8-af03-4c0f-93de-80d89cbabc7a req-a9f8d59d-6884-4888-aa73-2d2ce4ba76ac service nova] Releasing lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.754389] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389490, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.774518] env[61972]: DEBUG nova.scheduler.client.report [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 954.798566] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.804541] env[61972]: DEBUG oslo_vmware.api [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Task: {'id': task-1389489, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098578} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.805508] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.805508] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 954.805508] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.805508] env[61972]: INFO nova.compute.manager [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Took 1.09 seconds to destroy the instance on the hypervisor. [ 954.805723] env[61972]: DEBUG oslo.service.loopingcall [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.805723] env[61972]: DEBUG nova.compute.manager [-] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 954.805807] env[61972]: DEBUG nova.network.neutron [-] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.821826] env[61972]: DEBUG nova.network.neutron [-] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.969438] env[61972]: DEBUG nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 954.994087] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.994350] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.994573] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.994807] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.994959] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.995124] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 
tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.995340] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.995500] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.995665] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.995826] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.995999] env[61972]: DEBUG nova.virt.hardware [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.996881] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7944055-7d28-4591-912c-d111551d423f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.004586] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5acdc73-8951-45bb-8c8c-bd24a0ae9a1a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.094926] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.254200] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389490, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.279194] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.279712] env[61972]: DEBUG nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 955.282264] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.516s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.286234] env[61972]: DEBUG nova.objects.instance [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lazy-loading 'resources' on Instance uuid 84e07f61-2111-43cb-93a2-9cb47ac52503 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.323695] env[61972]: DEBUG nova.network.neutron [-] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.361920] env[61972]: DEBUG nova.network.neutron [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Successfully updated port: eaccfe94-cbc5-4a94-b940-24e8d40565e2 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.438944] env[61972]: DEBUG nova.compute.manager [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Received event network-vif-plugged-eaccfe94-cbc5-4a94-b940-24e8d40565e2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 955.439234] env[61972]: DEBUG oslo_concurrency.lockutils [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] Acquiring lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.439493] env[61972]: DEBUG oslo_concurrency.lockutils [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.439772] env[61972]: DEBUG 
oslo_concurrency.lockutils [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.439910] env[61972]: DEBUG nova.compute.manager [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] No waiting events found dispatching network-vif-plugged-eaccfe94-cbc5-4a94-b940-24e8d40565e2 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 955.440473] env[61972]: WARNING nova.compute.manager [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Received unexpected event network-vif-plugged-eaccfe94-cbc5-4a94-b940-24e8d40565e2 for instance with vm_state building and task_state spawning. [ 955.440473] env[61972]: DEBUG nova.compute.manager [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Received event network-changed-eaccfe94-cbc5-4a94-b940-24e8d40565e2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 955.440585] env[61972]: DEBUG nova.compute.manager [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Refreshing instance network info cache due to event network-changed-eaccfe94-cbc5-4a94-b940-24e8d40565e2. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 955.440734] env[61972]: DEBUG oslo_concurrency.lockutils [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] Acquiring lock "refresh_cache-dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.440916] env[61972]: DEBUG oslo_concurrency.lockutils [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] Acquired lock "refresh_cache-dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.441377] env[61972]: DEBUG nova.network.neutron [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Refreshing network info cache for port eaccfe94-cbc5-4a94-b940-24e8d40565e2 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 955.754973] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389490, 'name': ReconfigVM_Task, 'duration_secs': 0.509476} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.755319] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Reconfigured VM instance instance-0000005b to attach disk [datastore2] f71d004b-5343-4ef3-8f37-8ff544c335a2/f71d004b-5343-4ef3-8f37-8ff544c335a2.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.755961] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04ffb080-7aee-4842-8014-56f1124860fd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.762858] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 955.762858] env[61972]: value = "task-1389491" [ 955.762858] env[61972]: _type = "Task" [ 955.762858] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.770141] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389491, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.788789] env[61972]: DEBUG nova.compute.utils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.794233] env[61972]: DEBUG nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 955.794233] env[61972]: DEBUG nova.network.neutron [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 955.825698] env[61972]: INFO nova.compute.manager [-] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Took 1.02 seconds to deallocate network for instance. 
[ 955.846360] env[61972]: DEBUG nova.policy [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cefef67f4ae0451aaa108df20aa7a3db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a685a448ff041db8bc49b4429688e34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 955.863798] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.970564] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc0fbd3-d9fb-4c4b-98b7-14eac2c9931d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.978029] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dac50c7-fa5d-4785-9957-569117bfe0c9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.008338] env[61972]: DEBUG nova.network.neutron [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.010536] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd23f27-ca2e-416e-bbbb-936650d0f4e6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.018170] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8afac13-e020-49e1-88a7-67b02b40d103 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.032585] env[61972]: DEBUG nova.compute.provider_tree [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.085583] env[61972]: DEBUG nova.network.neutron [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.089305] env[61972]: DEBUG nova.network.neutron [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Successfully created port: 78188d45-b47e-4f77-b0d9-e6fa69c90cd7 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.276231] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389491, 'name': Rename_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.294265] env[61972]: DEBUG nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 956.334408] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.536657] env[61972]: DEBUG nova.scheduler.client.report [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 956.587824] env[61972]: DEBUG oslo_concurrency.lockutils [req-2c53e76b-a604-45a7-b3d2-26008f1e8ed3 req-342734de-2ee6-4c31-aa63-f18231a4eecb service nova] Releasing lock "refresh_cache-dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.588194] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.588356] env[61972]: DEBUG nova.network.neutron [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.773578] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389491, 'name': Rename_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.042536] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.045513] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.687s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.045720] env[61972]: DEBUG nova.objects.instance [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lazy-loading 'resources' on Instance uuid 56e21cf4-4dbc-4f72-97c0-082dd689c046 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.067940] env[61972]: INFO nova.scheduler.client.report [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted allocations for instance 84e07f61-2111-43cb-93a2-9cb47ac52503 [ 957.117428] env[61972]: DEBUG nova.network.neutron [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 957.235430] env[61972]: DEBUG nova.network.neutron [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Updating instance_info_cache with network_info: [{"id": "eaccfe94-cbc5-4a94-b940-24e8d40565e2", "address": "fa:16:3e:15:e5:99", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaccfe94-cb", "ovs_interfaceid": "eaccfe94-cbc5-4a94-b940-24e8d40565e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.273942] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389491, 'name': Rename_Task, 'duration_secs': 1.143647} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.274302] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 957.274638] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ca9ad79-b423-4ae7-9269-7df4201b1907 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.280536] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 957.280536] env[61972]: value = "task-1389492" [ 957.280536] env[61972]: _type = "Task" [ 957.280536] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.288678] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389492, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.302679] env[61972]: DEBUG nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 957.334452] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 957.334661] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 957.334822] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.335017] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 957.335186] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.335338] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 957.335547] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 957.335706] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 957.335874] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 957.336054] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 957.336240] env[61972]: DEBUG nova.virt.hardware [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 957.337146] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b359011-72ab-468f-80bd-86be17389c98 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.345475] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30088329-8137-4648-9698-7ae8e302c7b6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.548713] env[61972]: DEBUG nova.objects.instance [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lazy-loading 'numa_topology' on Instance uuid 56e21cf4-4dbc-4f72-97c0-082dd689c046 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.577944] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b354f1cb-f7b6-4ae9-8e63-2ea4514096a8 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "84e07f61-2111-43cb-93a2-9cb47ac52503" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.266s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.668415] env[61972]: DEBUG nova.compute.manager [req-169b0a21-f46e-493e-9d2f-f900a35b2d80 req-92948d9a-7035-430e-8165-d62a80a2faf3 service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Received event network-vif-plugged-78188d45-b47e-4f77-b0d9-e6fa69c90cd7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 957.668597] env[61972]: DEBUG oslo_concurrency.lockutils [req-169b0a21-f46e-493e-9d2f-f900a35b2d80 req-92948d9a-7035-430e-8165-d62a80a2faf3 service nova] Acquiring lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.668773] env[61972]: 
DEBUG oslo_concurrency.lockutils [req-169b0a21-f46e-493e-9d2f-f900a35b2d80 req-92948d9a-7035-430e-8165-d62a80a2faf3 service nova] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.668949] env[61972]: DEBUG oslo_concurrency.lockutils [req-169b0a21-f46e-493e-9d2f-f900a35b2d80 req-92948d9a-7035-430e-8165-d62a80a2faf3 service nova] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.669133] env[61972]: DEBUG nova.compute.manager [req-169b0a21-f46e-493e-9d2f-f900a35b2d80 req-92948d9a-7035-430e-8165-d62a80a2faf3 service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] No waiting events found dispatching network-vif-plugged-78188d45-b47e-4f77-b0d9-e6fa69c90cd7 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 957.669381] env[61972]: WARNING nova.compute.manager [req-169b0a21-f46e-493e-9d2f-f900a35b2d80 req-92948d9a-7035-430e-8165-d62a80a2faf3 service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Received unexpected event network-vif-plugged-78188d45-b47e-4f77-b0d9-e6fa69c90cd7 for instance with vm_state building and task_state spawning. [ 957.737859] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.738274] env[61972]: DEBUG nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Instance network_info: |[{"id": "eaccfe94-cbc5-4a94-b940-24e8d40565e2", "address": "fa:16:3e:15:e5:99", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaccfe94-cb", "ovs_interfaceid": "eaccfe94-cbc5-4a94-b940-24e8d40565e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 957.738696] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 
tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:e5:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e99c063c-0cb7-4db6-b077-114166cfe889', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaccfe94-cbc5-4a94-b940-24e8d40565e2', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.747311] env[61972]: DEBUG oslo.service.loopingcall [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.747311] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.747311] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae39455b-1d01-4439-a25a-05725ef2673b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.766897] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.766897] env[61972]: value = "task-1389493" [ 957.766897] env[61972]: _type = "Task" [ 957.766897] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.775705] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389493, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.776548] env[61972]: DEBUG nova.network.neutron [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Successfully updated port: 78188d45-b47e-4f77-b0d9-e6fa69c90cd7 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.790268] env[61972]: DEBUG oslo_vmware.api [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389492, 'name': PowerOnVM_Task, 'duration_secs': 0.424154} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.790510] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.790703] env[61972]: INFO nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Took 8.44 seconds to spawn the instance on the hypervisor. 
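The repeated "Task: {...} progress is N%" / "completed successfully" entries above come from a client-side polling loop around long-running vCenter tasks (Rename_Task, PowerOnVM_Task, CreateVM_Task). The sketch below is a minimal, self-contained illustration of that polling pattern only; it is not the oslo.vmware or pyVmomi implementation, and FakeTask/poll_task are invented names used purely as stand-ins for a remote task handle and its poller.

```python
# Illustrative sketch of a task-polling loop like the log entries above.
# FakeTask is a hypothetical stand-in for a remote vSphere task handle;
# a real client would query the server for state/progress instead.
import time


class FakeTask:
    """Hypothetical task handle that 'completes' after a few polls."""

    def __init__(self, name, steps=4):
        self.name = name
        self._steps = steps
        self._done = 0

    def poll(self):
        # Advance the fake task one step per poll and report progress.
        self._done = min(self._done + 1, self._steps)
        progress = int(100 * self._done / self._steps)
        state = "success" if self._done == self._steps else "running"
        return state, progress


def poll_task(task, interval=0.5):
    """Poll until the task finishes, printing progress like the log above."""
    start = time.time()
    while True:
        state, progress = task.poll()
        print("Task: {'name': %r} progress is %d%%." % (task.name, progress))
        if state == "success":
            print("Task %r completed successfully. duration_secs=%.3f"
                  % (task.name, time.time() - start))
            return
        if state == "error":
            raise RuntimeError("task %r failed" % task.name)
        time.sleep(interval)


if __name__ == "__main__":
    poll_task(FakeTask("PowerOnVM_Task"))
```

Running the sketch prints a progress line per poll and a final completion line with the elapsed time, mirroring the wait_for_task/_poll_task entries in this log; the actual service additionally records the vCenter-reported duration_secs in the completed task info.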
[ 957.790886] env[61972]: DEBUG nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 957.791667] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afe4251-45a9-46c6-b37d-b4fd7c30ee34 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.052265] env[61972]: DEBUG nova.objects.base [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Object Instance<56e21cf4-4dbc-4f72-97c0-082dd689c046> lazy-loaded attributes: resources,numa_topology {{(pid=61972) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 958.190267] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081884ca-0e4c-4bf2-a4f2-98ffa634699e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.198178] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866fbc0c-bd64-4584-a3c1-0bbc78776879 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.229433] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92f0237-05a3-42a5-85aa-7b8f13ae684e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.236812] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbefbb90-5ba8-4567-aa3e-9a9624671bd6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.249521] env[61972]: DEBUG nova.compute.provider_tree [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.276785] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389493, 'name': CreateVM_Task, 'duration_secs': 0.293491} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.277081] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.277792] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.278013] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.278332] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 958.278890] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.278890] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.279112] env[61972]: DEBUG nova.network.neutron [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.280034] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d32c7c7-c634-40aa-bd9e-8106244dd087 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.284993] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 958.284993] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52dda763-d376-6ed4-5e07-9e08b0d02de1" [ 958.284993] env[61972]: _type = "Task" [ 958.284993] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.294126] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52dda763-d376-6ed4-5e07-9e08b0d02de1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.310142] env[61972]: INFO nova.compute.manager [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Took 17.10 seconds to build instance. [ 958.754031] env[61972]: DEBUG nova.scheduler.client.report [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 958.794769] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52dda763-d376-6ed4-5e07-9e08b0d02de1, 'name': SearchDatastore_Task, 'duration_secs': 0.010881} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.795187] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.795345] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.795578] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.795728] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.795934] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.796209] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c411c4fa-ad10-4b08-897e-a05945654d1c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.804081] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.804294] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.804990] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8949ee7a-cf92-4965-bef4-41650b713e20 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.810039] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 958.810039] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52aa3b3d-5b4f-8bce-add4-d15040391fd8" [ 958.810039] env[61972]: _type = "Task" [ 958.810039] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.810481] env[61972]: DEBUG oslo_concurrency.lockutils [None req-042819d4-797b-4184-b319-8a1bed6834a1 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.620s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.814048] env[61972]: DEBUG nova.network.neutron [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.820078] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52aa3b3d-5b4f-8bce-add4-d15040391fd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.943373] env[61972]: DEBUG nova.network.neutron [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance_info_cache with network_info: [{"id": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "address": "fa:16:3e:2d:f5:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78188d45-b4", "ovs_interfaceid": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.258121] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.260621] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.222s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.260846] env[61972]: DEBUG nova.objects.instance [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lazy-loading 'resources' on Instance uuid b03b1fe7-2eda-4505-a6f9-19c570b15d1e {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.320539] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52aa3b3d-5b4f-8bce-add4-d15040391fd8, 'name': SearchDatastore_Task, 'duration_secs': 0.009333} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.321325] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-410ae967-a175-424d-9d7f-40441af80468 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.326796] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 959.326796] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bdaf5b-b617-7296-2a85-f26b1aec2195" [ 959.326796] env[61972]: _type = "Task" [ 959.326796] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.334150] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bdaf5b-b617-7296-2a85-f26b1aec2195, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.447836] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.447836] env[61972]: DEBUG nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Instance network_info: |[{"id": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "address": "fa:16:3e:2d:f5:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78188d45-b4", "ovs_interfaceid": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 959.448118] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:f5:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78188d45-b47e-4f77-b0d9-e6fa69c90cd7', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.455809] env[61972]: DEBUG oslo.service.loopingcall [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.456091] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.456365] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3698df0a-a1ad-453c-a9b3-3bdeadd5fd78 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.476453] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.476453] env[61972]: value = "task-1389494" [ 959.476453] env[61972]: _type = "Task" [ 959.476453] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.485307] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389494, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.543637] env[61972]: DEBUG nova.compute.manager [req-51547ae6-ca85-40f5-b589-63ff49b4e095 req-bbef590e-5ebe-4c45-874b-ff21372d1a54 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Received event network-changed-fdefc4b7-2c39-496c-9909-b5e05cbdc1da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 959.543852] env[61972]: DEBUG nova.compute.manager [req-51547ae6-ca85-40f5-b589-63ff49b4e095 req-bbef590e-5ebe-4c45-874b-ff21372d1a54 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Refreshing instance network info cache due to event network-changed-fdefc4b7-2c39-496c-9909-b5e05cbdc1da. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 959.544135] env[61972]: DEBUG oslo_concurrency.lockutils [req-51547ae6-ca85-40f5-b589-63ff49b4e095 req-bbef590e-5ebe-4c45-874b-ff21372d1a54 service nova] Acquiring lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.544326] env[61972]: DEBUG oslo_concurrency.lockutils [req-51547ae6-ca85-40f5-b589-63ff49b4e095 req-bbef590e-5ebe-4c45-874b-ff21372d1a54 service nova] Acquired lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.544527] env[61972]: DEBUG nova.network.neutron [req-51547ae6-ca85-40f5-b589-63ff49b4e095 req-bbef590e-5ebe-4c45-874b-ff21372d1a54 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Refreshing network info cache for port fdefc4b7-2c39-496c-9909-b5e05cbdc1da {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.695941] env[61972]: DEBUG nova.compute.manager [req-ba8bca02-cec2-47d7-866d-c76e0452f30b req-08457fe3-9318-40b8-827a-b156225f2270 service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Received event network-changed-78188d45-b47e-4f77-b0d9-e6fa69c90cd7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 959.696264] env[61972]: DEBUG nova.compute.manager [req-ba8bca02-cec2-47d7-866d-c76e0452f30b req-08457fe3-9318-40b8-827a-b156225f2270 service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Refreshing instance network info cache due to event network-changed-78188d45-b47e-4f77-b0d9-e6fa69c90cd7. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 959.696527] env[61972]: DEBUG oslo_concurrency.lockutils [req-ba8bca02-cec2-47d7-866d-c76e0452f30b req-08457fe3-9318-40b8-827a-b156225f2270 service nova] Acquiring lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.696707] env[61972]: DEBUG oslo_concurrency.lockutils [req-ba8bca02-cec2-47d7-866d-c76e0452f30b req-08457fe3-9318-40b8-827a-b156225f2270 service nova] Acquired lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.696906] env[61972]: DEBUG nova.network.neutron [req-ba8bca02-cec2-47d7-866d-c76e0452f30b req-08457fe3-9318-40b8-827a-b156225f2270 service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Refreshing network info cache for port 78188d45-b47e-4f77-b0d9-e6fa69c90cd7 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.769712] env[61972]: DEBUG oslo_concurrency.lockutils [None req-840cdbcf-8ae8-40f6-a0f1-9d26b24f8b1c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 29.771s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.770808] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock 
"56e21cf4-4dbc-4f72-97c0-082dd689c046" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 5.666s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.770808] env[61972]: INFO nova.compute.manager [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Unshelving [ 959.836955] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bdaf5b-b617-7296-2a85-f26b1aec2195, 'name': SearchDatastore_Task, 'duration_secs': 0.009253} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.839283] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.839551] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7/dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.839966] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39ab408f-a211-41a8-a4d8-d9d0176eca4e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.846481] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 959.846481] env[61972]: value = "task-1389495" [ 959.846481] env[61972]: _type = "Task" [ 959.846481] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.855859] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389495, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.919570] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c422be-cbca-41bc-aea9-25b28f6bb584 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.927326] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a353cb-0f63-4756-87d1-a9985fc3ed99 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.957535] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735d0964-dd1a-48b6-bb03-388d32704f7c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.964548] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3533f3d0-46de-4013-861f-eff8aed5d0c5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.977384] env[61972]: DEBUG nova.compute.provider_tree [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.985958] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389494, 'name': CreateVM_Task, 'duration_secs': 0.277375} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.986716] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 959.987424] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.987596] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.987926] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 959.988428] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76c22d9b-0ab2-449d-9c54-a6338e0a0829 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.992664] 
env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 959.992664] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5244804c-18a9-afe4-807c-9ceaf7764d48" [ 959.992664] env[61972]: _type = "Task" [ 959.992664] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.000494] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5244804c-18a9-afe4-807c-9ceaf7764d48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.291304] env[61972]: DEBUG nova.network.neutron [req-51547ae6-ca85-40f5-b589-63ff49b4e095 req-bbef590e-5ebe-4c45-874b-ff21372d1a54 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Updated VIF entry in instance network info cache for port fdefc4b7-2c39-496c-9909-b5e05cbdc1da. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.291769] env[61972]: DEBUG nova.network.neutron [req-51547ae6-ca85-40f5-b589-63ff49b4e095 req-bbef590e-5ebe-4c45-874b-ff21372d1a54 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Updating instance_info_cache with network_info: [{"id": "fdefc4b7-2c39-496c-9909-b5e05cbdc1da", "address": "fa:16:3e:d6:59:03", "network": {"id": "c74365c9-d7d6-401a-a7fb-98f833ef744e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-480970775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651d8f34661542219f5451bce866ec02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdefc4b7-2c", "ovs_interfaceid": "fdefc4b7-2c39-496c-9909-b5e05cbdc1da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.357520] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389495, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45071} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.357794] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7/dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.358016] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.358320] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41c6afed-247f-490e-b674-b8d5c1dfd22b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.364541] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 960.364541] env[61972]: value = "task-1389496" [ 960.364541] env[61972]: _type = "Task" [ 960.364541] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.375940] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389496, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.483414] env[61972]: DEBUG nova.scheduler.client.report [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 960.503260] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5244804c-18a9-afe4-807c-9ceaf7764d48, 'name': SearchDatastore_Task, 'duration_secs': 0.012335} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.504206] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.504474] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.504711] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.504920] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.505128] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.505638] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61c68d44-eab0-44c6-9c5b-e145679d710e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.513836] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.514050] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.514762] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddd60008-a854-4bb8-9719-4395441df8b8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.520645] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 960.520645] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a6e5fc-261a-d4a9-1b56-10fd15256068" [ 960.520645] env[61972]: _type = "Task" [ 960.520645] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.528232] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a6e5fc-261a-d4a9-1b56-10fd15256068, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.541593] env[61972]: DEBUG nova.network.neutron [req-ba8bca02-cec2-47d7-866d-c76e0452f30b req-08457fe3-9318-40b8-827a-b156225f2270 service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updated VIF entry in instance network info cache for port 78188d45-b47e-4f77-b0d9-e6fa69c90cd7. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.541949] env[61972]: DEBUG nova.network.neutron [req-ba8bca02-cec2-47d7-866d-c76e0452f30b req-08457fe3-9318-40b8-827a-b156225f2270 service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance_info_cache with network_info: [{"id": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "address": "fa:16:3e:2d:f5:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78188d45-b4", "ovs_interfaceid": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.794366] env[61972]: DEBUG oslo_concurrency.lockutils [req-51547ae6-ca85-40f5-b589-63ff49b4e095 req-bbef590e-5ebe-4c45-874b-ff21372d1a54 service nova] Releasing lock "refresh_cache-f71d004b-5343-4ef3-8f37-8ff544c335a2" {{(pid=61972) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.795980] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.873861] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389496, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062912} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.874274] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 960.875067] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6136423f-361d-4a79-8bc8-b7b40af481ea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.896645] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7/dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 960.896958] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fb1e7b5-7aee-4a93-ab27-441e653cebfa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.916241] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 960.916241] env[61972]: value = "task-1389497" [ 960.916241] env[61972]: _type = "Task" [ 960.916241] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.925971] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389497, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.988148] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.727s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.990526] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.649s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.990769] env[61972]: DEBUG nova.objects.instance [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lazy-loading 'resources' on Instance uuid 9562558a-89ba-4169-bd0a-ad31fc0c33bc {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.006565] env[61972]: INFO nova.scheduler.client.report [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Deleted allocations for instance b03b1fe7-2eda-4505-a6f9-19c570b15d1e [ 961.030311] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52a6e5fc-261a-d4a9-1b56-10fd15256068, 'name': SearchDatastore_Task, 'duration_secs': 0.009312} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.031163] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d4f24b9-78d1-4a96-a1d8-753d681efc9e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.036448] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 961.036448] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52234a13-6690-0aaf-9c56-e1570dbd0ca7" [ 961.036448] env[61972]: _type = "Task" [ 961.036448] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.044640] env[61972]: DEBUG oslo_concurrency.lockutils [req-ba8bca02-cec2-47d7-866d-c76e0452f30b req-08457fe3-9318-40b8-827a-b156225f2270 service nova] Releasing lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.045043] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52234a13-6690-0aaf-9c56-e1570dbd0ca7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.426129] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389497, 'name': ReconfigVM_Task, 'duration_secs': 0.263503} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.426417] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Reconfigured VM instance instance-0000005c to attach disk [datastore2] dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7/dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 961.427043] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-242e9c8d-2b5a-4135-9ace-8bf464b330a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.432619] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 961.432619] env[61972]: value = "task-1389498" [ 961.432619] env[61972]: _type = "Task" [ 961.432619] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.440007] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389498, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.494171] env[61972]: DEBUG nova.objects.instance [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lazy-loading 'numa_topology' on Instance uuid 9562558a-89ba-4169-bd0a-ad31fc0c33bc {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.516858] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4bf48a86-4e25-4e3f-9d10-4b65c4b06213 tempest-ImagesTestJSON-1739945781 tempest-ImagesTestJSON-1739945781-project-member] Lock "b03b1fe7-2eda-4505-a6f9-19c570b15d1e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.315s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.545895] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52234a13-6690-0aaf-9c56-e1570dbd0ca7, 'name': SearchDatastore_Task, 'duration_secs': 0.011042} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.546175] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.546442] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 8745c578-de46-4ade-bf08-f0bc9bb300d8/8745c578-de46-4ade-bf08-f0bc9bb300d8.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 961.546698] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b790c234-5fac-4ec2-89d7-c0bdfc9f86f8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.552695] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 961.552695] env[61972]: value = "task-1389499" [ 961.552695] env[61972]: _type = "Task" [ 961.552695] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.560214] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389499, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.943587] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389498, 'name': Rename_Task, 'duration_secs': 0.385176} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.943902] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.944218] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a194d0c6-8c71-4c7f-b73a-6147c77dedc7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.950468] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 961.950468] env[61972]: value = "task-1389500" [ 961.950468] env[61972]: _type = "Task" [ 961.950468] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.959024] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389500, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.996952] env[61972]: DEBUG nova.objects.base [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Object Instance<9562558a-89ba-4169-bd0a-ad31fc0c33bc> lazy-loaded attributes: resources,numa_topology {{(pid=61972) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 962.062622] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389499, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.167470] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c378eef5-2e7c-4669-a707-9aa2ec4e5d79 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.175507] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5d2ce4-18c1-4a98-a899-cd5f0b980946 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.209265] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795771e7-3da8-4f89-8568-36ec70e3fee2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.217683] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7601506d-2aec-4e7b-9280-1b61b435d03a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.233121] env[61972]: DEBUG nova.compute.provider_tree [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.460196] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389500, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.564531] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389499, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695135} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.564856] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 8745c578-de46-4ade-bf08-f0bc9bb300d8/8745c578-de46-4ade-bf08-f0bc9bb300d8.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.565173] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.565396] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52b8f6d3-5d20-49cb-8a14-09d0ea262991 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.572515] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 962.572515] env[61972]: value = "task-1389501" [ 962.572515] env[61972]: _type = "Task" [ 962.572515] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.580700] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389501, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.736805] env[61972]: DEBUG nova.scheduler.client.report [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 962.967263] env[61972]: DEBUG oslo_vmware.api [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389500, 'name': PowerOnVM_Task, 'duration_secs': 0.678428} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.968101] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.968101] env[61972]: INFO nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Took 8.00 seconds to spawn the instance on the hypervisor. [ 962.968101] env[61972]: DEBUG nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 962.968861] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd034b73-386f-4290-937d-364a20ec31ca {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.082520] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389501, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067449} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.082627] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.083449] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27db13f9-4beb-4143-acc9-24809a14b324 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.105189] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 8745c578-de46-4ade-bf08-f0bc9bb300d8/8745c578-de46-4ade-bf08-f0bc9bb300d8.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.105477] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b6e89b4-b9a4-42ca-980a-523e9c734bd0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.125816] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 963.125816] env[61972]: value = "task-1389502" [ 963.125816] env[61972]: _type = "Task" [ 963.125816] 
env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.133959] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389502, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.243757] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.253s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.247717] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.651s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.247965] env[61972]: INFO nova.compute.claims [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 963.488511] env[61972]: INFO nova.compute.manager [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Took 15.90 seconds to build instance. [ 963.638828] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389502, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.757430] env[61972]: DEBUG oslo_concurrency.lockutils [None req-38f77305-2df4-4c38-bcef-c653428f4efe tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 34.075s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.757747] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 9.077s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.757928] env[61972]: INFO nova.compute.manager [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Unshelving [ 963.990747] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4eaa90ef-3d1c-4e67-9861-4633bec4c5d8 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.418s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.137469] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389502, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.531795] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faa9e85-79b5-4723-9df0-7b4ffbcb17c0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.540077] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5aa36c-ee75-4d68-9ee6-944444a2c1a9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.572519] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d19fe5-ca9b-4496-92e0-98dfa2ecb0cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.580152] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2fec73d-af59-4c40-8d6f-b75c763f8a26 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.593588] env[61972]: DEBUG nova.compute.provider_tree [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.638994] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389502, 'name': ReconfigVM_Task, 'duration_secs': 1.031132} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.639391] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 8745c578-de46-4ade-bf08-f0bc9bb300d8/8745c578-de46-4ade-bf08-f0bc9bb300d8.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.640026] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-904da0fd-be0c-4ca0-9f82-f9dfb4564015 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.646074] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 964.646074] env[61972]: value = "task-1389503" [ 964.646074] env[61972]: _type = "Task" [ 964.646074] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.654144] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389503, 'name': Rename_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.780222] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.007251] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.007655] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.007852] env[61972]: DEBUG nova.compute.manager [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 965.008792] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358236a4-799c-4422-bd41-c4bd9e2a8a16 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.016110] env[61972]: DEBUG nova.compute.manager [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61972) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 965.016679] env[61972]: DEBUG nova.objects.instance [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'flavor' on Instance uuid dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.096277] env[61972]: DEBUG nova.scheduler.client.report [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 965.162023] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389503, 'name': Rename_Task, 'duration_secs': 0.148195} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.162023] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.162023] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d41458bc-4e25-4a61-905e-39f73e18db9c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.168669] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 965.168669] env[61972]: value = "task-1389504" [ 965.168669] env[61972]: _type = "Task" [ 965.168669] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.177216] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.601540] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.602078] env[61972]: DEBUG nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 965.604800] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.806s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.608086] env[61972]: INFO nova.compute.claims [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.683711] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389504, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.024855] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 966.025219] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80adbb36-8d1e-4114-aa78-a1b21740e1ae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.032555] env[61972]: DEBUG oslo_vmware.api [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 966.032555] env[61972]: value = "task-1389505" [ 966.032555] env[61972]: _type = "Task" [ 966.032555] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.044309] env[61972]: DEBUG oslo_vmware.api [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389505, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.112539] env[61972]: DEBUG nova.compute.utils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 966.117794] env[61972]: DEBUG nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 966.117794] env[61972]: DEBUG nova.network.neutron [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 966.179048] env[61972]: DEBUG oslo_vmware.api [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389504, 'name': PowerOnVM_Task, 'duration_secs': 0.708482} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.179389] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.179535] env[61972]: INFO nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Took 8.88 seconds to spawn the instance on the hypervisor. [ 966.179703] env[61972]: DEBUG nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 966.180488] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8acf2ef-3381-4f93-92f4-d19e8816044c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.200806] env[61972]: DEBUG nova.policy [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa1cef9829b45f4bbe90e9882b8f8c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57829399c5741c08c30bb60163148b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 966.501696] env[61972]: DEBUG nova.network.neutron [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Successfully created port: 582a2b0a-d087-426c-b734-d6fc7ececb7d {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.543935] env[61972]: DEBUG oslo_vmware.api [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389505, 'name': PowerOffVM_Task, 'duration_secs': 0.49295} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.547580] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 966.548064] env[61972]: DEBUG nova.compute.manager [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 966.549708] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d60ce38-2f44-4c3d-8592-014e0138a2bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.616649] env[61972]: DEBUG nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 966.699115] env[61972]: INFO nova.compute.manager [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Took 17.94 seconds to build instance. [ 966.924021] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2219e1-f528-4376-b2f7-a33e127b95f7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.930707] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f576e88-4bc4-4f9c-9799-65af3eec512d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.963017] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515016e9-126e-44ed-9515-907eb90c9d6b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.971293] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a60d154-d5fc-488e-ba32-4c8c820e744b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.985723] env[61972]: DEBUG nova.compute.provider_tree [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.066649] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6abbebc9-616a-4cfd-98ed-ab4ef6e40079 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.201161] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d218bdf0-d499-46af-9798-a6423fe307a9 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.450s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.491163] env[61972]: DEBUG nova.scheduler.client.report [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 967.504047] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.504349] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.504568] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.504791] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.504942] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.507771] env[61972]: INFO nova.compute.manager [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Terminating instance [ 967.627064] env[61972]: DEBUG nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 967.656324] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 967.656570] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 967.656728] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 967.656910] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 967.657072] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 967.657230] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 967.657443] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 967.657837] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 967.658113] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 967.658316] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 967.658499] env[61972]: DEBUG nova.virt.hardware [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 967.659370] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1dd7bc-354f-4185-9847-303a2998fee2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.667683] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc18d66d-7a55-4e02-9b50-13efa5a9aa53 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.996056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.996729] env[61972]: DEBUG nova.compute.manager [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 967.999582] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.905s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.000991] env[61972]: INFO nova.compute.claims [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 968.012254] env[61972]: DEBUG nova.compute.manager [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 968.012621] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.014123] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4dd068-d224-44c5-83a6-3fa25b42e9d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.021881] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.022458] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88e76409-a60f-4ed8-b2e5-ba569186edfc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.082266] env[61972]: DEBUG nova.compute.manager [req-6f2c22d3-46c0-4611-ba5b-ec44ae9662b2 req-79af716e-84e3-41a8-9595-dbcad9266abe service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Received event network-vif-plugged-582a2b0a-d087-426c-b734-d6fc7ececb7d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 968.082674] env[61972]: DEBUG oslo_concurrency.lockutils [req-6f2c22d3-46c0-4611-ba5b-ec44ae9662b2 req-79af716e-84e3-41a8-9595-dbcad9266abe service nova] Acquiring lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.082674] env[61972]: DEBUG oslo_concurrency.lockutils [req-6f2c22d3-46c0-4611-ba5b-ec44ae9662b2 req-79af716e-84e3-41a8-9595-dbcad9266abe service nova] Lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.082854] env[61972]: DEBUG oslo_concurrency.lockutils [req-6f2c22d3-46c0-4611-ba5b-ec44ae9662b2 req-79af716e-84e3-41a8-9595-dbcad9266abe service nova] Lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.083255] env[61972]: DEBUG nova.compute.manager [req-6f2c22d3-46c0-4611-ba5b-ec44ae9662b2 req-79af716e-84e3-41a8-9595-dbcad9266abe service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] No waiting events found dispatching network-vif-plugged-582a2b0a-d087-426c-b734-d6fc7ececb7d {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 968.083472] env[61972]: WARNING nova.compute.manager [req-6f2c22d3-46c0-4611-ba5b-ec44ae9662b2 req-79af716e-84e3-41a8-9595-dbcad9266abe service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Received unexpected event network-vif-plugged-582a2b0a-d087-426c-b734-d6fc7ececb7d for instance with vm_state building and task_state spawning. [ 968.097781] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 968.098029] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 968.098235] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleting the datastore file [datastore2] dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.098506] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-392e89b1-6f29-485d-b94e-9c58661c8954 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.109300] env[61972]: DEBUG oslo_vmware.api [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 968.109300] env[61972]: value = "task-1389507" [ 968.109300] env[61972]: _type = "Task" [ 968.109300] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.119518] env[61972]: DEBUG oslo_vmware.api [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389507, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.499495] env[61972]: DEBUG nova.network.neutron [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Successfully updated port: 582a2b0a-d087-426c-b734-d6fc7ececb7d {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 968.506008] env[61972]: DEBUG nova.compute.utils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 968.512234] env[61972]: DEBUG nova.compute.manager [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Not allocating networking since 'none' was specified. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 968.619425] env[61972]: DEBUG oslo_vmware.api [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389507, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195887} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.619804] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.620038] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 968.620266] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 968.620475] env[61972]: INFO nova.compute.manager [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Took 0.61 seconds to destroy the instance on the hypervisor. [ 968.620795] env[61972]: DEBUG oslo.service.loopingcall [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.621046] env[61972]: DEBUG nova.compute.manager [-] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 968.621211] env[61972]: DEBUG nova.network.neutron [-] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 968.847513] env[61972]: DEBUG nova.compute.manager [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Stashing vm_state: active {{(pid=61972) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 969.002244] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "refresh_cache-e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.002362] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "refresh_cache-e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.002585] env[61972]: DEBUG nova.network.neutron [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.013600] env[61972]: DEBUG nova.compute.manager [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 969.190966] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ae8d94-c298-45eb-a0d2-f1c786a7dfd7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.198674] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72574ada-c0cb-40bc-a0e9-a31cca8faba4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.230746] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611f7afb-8164-46ac-b507-2b16696c02d1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.238337] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fb5b8d-0e40-4c73-8958-cb30772001fc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.251518] env[61972]: DEBUG nova.compute.provider_tree [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.363931] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.414283] env[61972]: DEBUG nova.network.neutron [-] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.559742] env[61972]: DEBUG nova.network.neutron [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.755040] env[61972]: DEBUG nova.scheduler.client.report [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 969.780873] env[61972]: DEBUG nova.network.neutron [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Updating instance_info_cache with network_info: [{"id": "582a2b0a-d087-426c-b734-d6fc7ececb7d", "address": "fa:16:3e:de:4e:7c", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap582a2b0a-d0", "ovs_interfaceid": "582a2b0a-d087-426c-b734-d6fc7ececb7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.917414] env[61972]: INFO nova.compute.manager [-] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Took 1.30 seconds to deallocate network for instance. [ 970.024865] env[61972]: DEBUG nova.compute.manager [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 970.049635] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.049883] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.050051] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.050245] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.050395] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.050545] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.050757] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.050918] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.051097] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 
tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.051264] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.051437] env[61972]: DEBUG nova.virt.hardware [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.052326] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7425a63a-9482-4777-977b-53490f5ce2a4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.060468] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d736f6d-2af1-4cac-aeca-425940829f2b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.073455] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.078772] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Creating folder: Project (c626ccddb4dd4aada96840a047bbe485). Parent ref: group-v294799. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 970.079045] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-691e6c5e-f584-4a4f-a527-c4ac6137bdac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.088903] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Created folder: Project (c626ccddb4dd4aada96840a047bbe485) in parent group-v294799. [ 970.089112] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Creating folder: Instances. Parent ref: group-v294906. {{(pid=61972) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 970.089311] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dede01a0-1173-4299-82df-eb78d55f0e28 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.098130] env[61972]: INFO nova.virt.vmwareapi.vm_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Created folder: Instances in parent group-v294906. 
[ 970.098360] env[61972]: DEBUG oslo.service.loopingcall [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.098534] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.098717] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd108b55-fcc5-4e7c-b407-5af1bec869e1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.114134] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.114134] env[61972]: value = "task-1389510" [ 970.114134] env[61972]: _type = "Task" [ 970.114134] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.120847] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389510, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.259709] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.260s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.260268] env[61972]: DEBUG nova.compute.manager [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 970.263110] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.929s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.263295] env[61972]: DEBUG nova.objects.instance [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lazy-loading 'resources' on Instance uuid e8582450-36c2-4d6b-89ee-6fef324063c4 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.283890] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "refresh_cache-e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.284229] env[61972]: DEBUG nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Instance network_info: |[{"id": "582a2b0a-d087-426c-b734-d6fc7ececb7d", "address": "fa:16:3e:de:4e:7c", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap582a2b0a-d0", "ovs_interfaceid": "582a2b0a-d087-426c-b734-d6fc7ececb7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 970.284727] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:4e:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '582a2b0a-d087-426c-b734-d6fc7ececb7d', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.292259] env[61972]: DEBUG oslo.service.loopingcall [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 
tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.293135] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.293135] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5db69a82-03bf-4a7a-81ea-265c0523cca6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.310088] env[61972]: DEBUG nova.compute.manager [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Received event network-changed-582a2b0a-d087-426c-b734-d6fc7ececb7d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 970.310317] env[61972]: DEBUG nova.compute.manager [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Refreshing instance network info cache due to event network-changed-582a2b0a-d087-426c-b734-d6fc7ececb7d. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 970.310466] env[61972]: DEBUG oslo_concurrency.lockutils [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] Acquiring lock "refresh_cache-e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.310609] env[61972]: DEBUG oslo_concurrency.lockutils [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] Acquired lock "refresh_cache-e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.310770] env[61972]: DEBUG nova.network.neutron [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Refreshing network info cache for port 582a2b0a-d087-426c-b734-d6fc7ececb7d {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 970.317254] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.317254] env[61972]: value = "task-1389511" [ 970.317254] env[61972]: _type = "Task" [ 970.317254] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.326105] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389511, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.424465] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.623457] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389510, 'name': CreateVM_Task, 'duration_secs': 0.293099} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.623701] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 970.624153] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.624321] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.624658] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 970.624911] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-343d73a9-6264-4480-bca8-87295b999a83 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.629134] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 970.629134] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52854598-a44f-7d91-92dc-479043b352b8" [ 970.629134] env[61972]: _type = "Task" [ 970.629134] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.636435] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52854598-a44f-7d91-92dc-479043b352b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.766074] env[61972]: DEBUG nova.compute.utils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 970.767558] env[61972]: DEBUG nova.compute.manager [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Not allocating networking since 'none' was specified. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 970.827557] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389511, 'name': CreateVM_Task, 'duration_secs': 0.297243} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.827721] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 970.828378] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.910669] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3be9a6-9195-4cb8-83a8-4489ef7f06b3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.920437] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8f95c0-6fb2-4e33-ba85-c1089af06763 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.953788] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978edaa6-070f-4b23-a9de-39dc222b5085 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.961511] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97b3f7a-ce74-41a3-bd5f-092eddb01004 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.975113] env[61972]: DEBUG nova.compute.provider_tree [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.083215] env[61972]: DEBUG nova.network.neutron [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Updated VIF entry in instance network info cache for port 582a2b0a-d087-426c-b734-d6fc7ececb7d. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 971.083581] env[61972]: DEBUG nova.network.neutron [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Updating instance_info_cache with network_info: [{"id": "582a2b0a-d087-426c-b734-d6fc7ececb7d", "address": "fa:16:3e:de:4e:7c", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap582a2b0a-d0", "ovs_interfaceid": "582a2b0a-d087-426c-b734-d6fc7ececb7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.139437] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52854598-a44f-7d91-92dc-479043b352b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009187} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.139766] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.140017] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.140256] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.140408] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.140588] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 971.140956] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.141281] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 971.141513] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb305a22-faff-488a-9df5-c5799407cdb4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.143290] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d72630c-5158-48c2-85ab-5f52c15c48b0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.148951] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 
tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 971.148951] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7840e-ea4b-05fb-ccf0-b005ec5af1dc" [ 971.148951] env[61972]: _type = "Task" [ 971.148951] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.152214] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 971.152397] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 971.153419] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3813a7ea-28f9-478c-b37a-765cf1ef55d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.158605] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7840e-ea4b-05fb-ccf0-b005ec5af1dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.161962] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 971.161962] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5227371f-7d46-64e2-fcdf-66f6d24cda13" [ 971.161962] env[61972]: _type = "Task" [ 971.161962] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.168704] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5227371f-7d46-64e2-fcdf-66f6d24cda13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.269227] env[61972]: DEBUG nova.compute.manager [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 971.478071] env[61972]: DEBUG nova.scheduler.client.report [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 971.586614] env[61972]: DEBUG oslo_concurrency.lockutils [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] Releasing lock "refresh_cache-e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.586890] env[61972]: DEBUG nova.compute.manager [req-c0fff64f-79b5-4c9c-89c3-a876b375c13e req-081982be-dd92-40a7-a458-daaaa43d3dd5 service nova] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Received event network-vif-deleted-eaccfe94-cbc5-4a94-b940-24e8d40565e2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 971.658611] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7840e-ea4b-05fb-ccf0-b005ec5af1dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010711} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.658912] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.659162] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.659394] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.670041] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5227371f-7d46-64e2-fcdf-66f6d24cda13, 'name': SearchDatastore_Task, 'duration_secs': 0.033657} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.670041] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e597412-b159-4d21-89d8-8cf21c02a3a3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.674736] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 971.674736] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d4fbf6-5778-0726-bd9e-88c0ed4db465" [ 971.674736] env[61972]: _type = "Task" [ 971.674736] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.681470] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d4fbf6-5778-0726-bd9e-88c0ed4db465, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.984378] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.986931] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.190s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.986931] env[61972]: DEBUG nova.objects.instance [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lazy-loading 'pci_requests' on Instance uuid 56e21cf4-4dbc-4f72-97c0-082dd689c046 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.001462] env[61972]: INFO nova.scheduler.client.report [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Deleted allocations for instance e8582450-36c2-4d6b-89ee-6fef324063c4 [ 972.186854] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d4fbf6-5778-0726-bd9e-88c0ed4db465, 'name': SearchDatastore_Task, 'duration_secs': 0.041707} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.187141] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.187407] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] bf32c8b2-51b4-495a-b340-5dbabdf33137/bf32c8b2-51b4-495a-b340-5dbabdf33137.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 972.187722] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.187918] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.188160] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e93de6d-6b97-4ff9-8163-cba539b3abd0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.190160] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2e860b5-f6b4-4b7c-82ac-3670ea7caf91 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.196741] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 972.196741] env[61972]: value = "task-1389512" [ 972.196741] env[61972]: _type = "Task" [ 972.196741] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.200876] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.201074] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 972.202068] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51bf00a3-4e2b-4484-aa7f-59dafe6ab2bb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.207059] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389512, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.209924] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 972.209924] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fb08ce-9344-3309-f2a5-8a407ee52229" [ 972.209924] env[61972]: _type = "Task" [ 972.209924] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.216969] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fb08ce-9344-3309-f2a5-8a407ee52229, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.278554] env[61972]: DEBUG nova.compute.manager [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 972.309565] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 972.309828] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 972.309987] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 972.310190] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 972.310344] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 972.310493] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 972.310704] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 972.310866] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 972.311079] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea 
tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 972.311259] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 972.311433] env[61972]: DEBUG nova.virt.hardware [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 972.312310] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb45ff9-d106-47aa-a47d-6a1efb6c64f8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.319988] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c959c8c-8fbb-4101-9d18-73f65ac354ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.332927] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.333389] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 972.339060] env[61972]: DEBUG oslo.service.loopingcall [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 972.339283] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.340596] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0565271-2276-4f18-813a-6f9338183480] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 972.341346] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28ca2426-6a96-49bd-aab8-9273740c02c3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.358954] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 972.358954] env[61972]: value = "task-1389513" [ 972.358954] env[61972]: _type = "Task" [ 972.358954] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.367954] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389513, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.490442] env[61972]: DEBUG nova.objects.instance [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lazy-loading 'numa_topology' on Instance uuid 56e21cf4-4dbc-4f72-97c0-082dd689c046 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.509476] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4a65f0c6-e5e0-4684-94cc-e4ad511bac9a tempest-ServersAaction247Test-2102497308 tempest-ServersAaction247Test-2102497308-project-member] Lock "e8582450-36c2-4d6b-89ee-6fef324063c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.379s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.706797] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389512, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461115} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.707096] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] bf32c8b2-51b4-495a-b340-5dbabdf33137/bf32c8b2-51b4-495a-b340-5dbabdf33137.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.707323] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.707589] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36ca364c-efce-4271-8c5f-32e2933a3bc3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.715190] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 972.715190] env[61972]: value = "task-1389514" [ 972.715190] env[61972]: _type = "Task" [ 972.715190] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.722038] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fb08ce-9344-3309-f2a5-8a407ee52229, 'name': SearchDatastore_Task, 'duration_secs': 0.007899} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.723399] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45129c67-7a20-4ba9-b16c-f8c1bdc529d1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.728247] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389514, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.731180] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 972.731180] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c99eb3-5e94-6482-5e82-bbc6ddbc61dd" [ 972.731180] env[61972]: _type = "Task" [ 972.731180] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.738610] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c99eb3-5e94-6482-5e82-bbc6ddbc61dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.857668] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.857852] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 972.869469] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389513, 'name': CreateVM_Task, 'duration_secs': 0.447603} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.869676] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0565271-2276-4f18-813a-6f9338183480] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 972.870078] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.870242] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.870545] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 972.870839] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-385b8964-923f-449d-99a4-a6df46d8c407 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.875823] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 972.875823] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]528a7609-69a3-3ee9-aeed-2b6e33615497" [ 972.875823] env[61972]: _type = "Task" [ 972.875823] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.888815] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]528a7609-69a3-3ee9-aeed-2b6e33615497, 'name': SearchDatastore_Task, 'duration_secs': 0.008552} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.889199] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.889436] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.889643] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.993638] env[61972]: INFO nova.compute.claims [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.225758] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389514, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066036} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.225938] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 973.226862] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d056925d-6d88-48d2-b251-2d0ac866b494 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.247872] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] bf32c8b2-51b4-495a-b340-5dbabdf33137/bf32c8b2-51b4-495a-b340-5dbabdf33137.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.251192] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-873172d6-50c6-4bd4-ac53-345d57c4a36b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.271751] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52c99eb3-5e94-6482-5e82-bbc6ddbc61dd, 'name': SearchDatastore_Task, 'duration_secs': 0.009443} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.273190] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.273469] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d/e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 973.273831] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 973.273831] env[61972]: value = "task-1389515" [ 973.273831] env[61972]: _type = "Task" [ 973.273831] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.274160] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.274451] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.274762] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-224eba78-5cee-4fc1-ac46-2b86bd4b6704 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.277058] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72ccf8b0-cd81-4f32-ac21-01e3b7e96897 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.287131] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389515, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.288389] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 973.288389] env[61972]: value = "task-1389516" [ 973.288389] env[61972]: _type = "Task" [ 973.288389] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.292461] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.292680] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 973.293947] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94a01e16-7aab-4dfe-9b54-e234a983de36 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.301222] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389516, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.302038] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 973.302038] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]523f4032-95d0-ff22-7b7d-a83f18cf0304" [ 973.302038] env[61972]: _type = "Task" [ 973.302038] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.309892] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523f4032-95d0-ff22-7b7d-a83f18cf0304, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.632278] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.632533] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.791027] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389515, 'name': ReconfigVM_Task, 'duration_secs': 0.317893} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.793687] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Reconfigured VM instance instance-0000005f to attach disk [datastore2] bf32c8b2-51b4-495a-b340-5dbabdf33137/bf32c8b2-51b4-495a-b340-5dbabdf33137.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.794603] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b3f2b9d-8ca5-4a9b-bcc3-595b6f4bf599 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.801411] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389516, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.803189] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 973.803189] env[61972]: value = "task-1389517" [ 973.803189] env[61972]: _type = "Task" [ 973.803189] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.819838] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]523f4032-95d0-ff22-7b7d-a83f18cf0304, 'name': SearchDatastore_Task, 'duration_secs': 0.014879} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.821053] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389517, 'name': Rename_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.821852] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3dcc297-732f-4831-98e4-f4ee60392827 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.828167] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 973.828167] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5216b79b-dd6e-5fbd-9c24-9ce8b597a5a5" [ 973.828167] env[61972]: _type = "Task" [ 973.828167] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.836529] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5216b79b-dd6e-5fbd-9c24-9ce8b597a5a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.864194] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Skipping network cache update for instance because it has been migrated to another host. 
{{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10301}} [ 974.136219] env[61972]: DEBUG nova.compute.utils [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 974.138250] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201bd455-11e3-401e-b5c4-817ebb60328e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.147734] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe50876-87e7-4d25-bdb6-931343c3caa5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.177210] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e80d131-dca5-4349-8538-5107df546e41 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.184518] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b427100-c10a-45dc-84b6-67a739fa5799 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.197392] env[61972]: DEBUG nova.compute.provider_tree [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.301764] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389516, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662343} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.302071] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d/e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.302249] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.302503] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14e48cbf-719c-4ef5-8df3-baeb1a4b0213 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.310502] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 974.310502] env[61972]: value = "task-1389518" [ 974.310502] env[61972]: _type = "Task" [ 974.310502] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.316430] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389517, 'name': Rename_Task, 'duration_secs': 0.191637} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.317014] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.317281] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f88ada7f-99e4-4372-a3d6-38ee41279105 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.321417] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389518, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.325296] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 974.325296] env[61972]: value = "task-1389519" [ 974.325296] env[61972]: _type = "Task" [ 974.325296] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.334845] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389519, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.339997] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5216b79b-dd6e-5fbd-9c24-9ce8b597a5a5, 'name': SearchDatastore_Task, 'duration_secs': 0.053213} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.340266] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.340527] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] f0565271-2276-4f18-813a-6f9338183480/f0565271-2276-4f18-813a-6f9338183480.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.340770] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-253cfea1-e15b-4976-8064-87e52fbce03f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.346255] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 974.346255] env[61972]: value = "task-1389520" [ 974.346255] env[61972]: _type = "Task" [ 974.346255] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.353993] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389520, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.397691] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.397846] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquired lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.397994] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Forcefully refreshing network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 974.643428] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.700596] env[61972]: DEBUG nova.scheduler.client.report [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 974.819886] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389518, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065228} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.820173] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.820942] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969276fd-e3cc-47f6-b0c1-e7b326b0b9b9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.842018] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d/e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.845145] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33bdbeb0-9572-45f5-87b8-d0536fad21d4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.865014] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389519, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.866976] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 974.866976] env[61972]: value = "task-1389521" [ 974.866976] env[61972]: _type = "Task" [ 974.866976] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.872749] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455653} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.873311] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] f0565271-2276-4f18-813a-6f9338183480/f0565271-2276-4f18-813a-6f9338183480.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.873528] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.873767] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54bbe5b1-9781-43f0-b258-f9d40d34b498 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.878167] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389521, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.883244] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 974.883244] env[61972]: value = "task-1389522" [ 974.883244] env[61972]: _type = "Task" [ 974.883244] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.890503] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389522, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.206288] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.220s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.210212] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.430s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.210829] env[61972]: DEBUG nova.objects.instance [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lazy-loading 'pci_requests' on Instance uuid 9562558a-89ba-4169-bd0a-ad31fc0c33bc {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.306093] env[61972]: INFO nova.network.neutron [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating port 89e228e1-2aac-4e05-98ee-5c29dd44f55b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 975.344237] env[61972]: DEBUG oslo_vmware.api [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389519, 'name': PowerOnVM_Task, 'duration_secs': 0.576105} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.344514] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 975.344713] env[61972]: INFO nova.compute.manager [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Took 5.32 seconds to spawn the instance on the hypervisor. 
[ 975.344894] env[61972]: DEBUG nova.compute.manager [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 975.345674] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b0f167-9f18-4540-93fc-05310052a91d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.375977] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389521, 'name': ReconfigVM_Task, 'duration_secs': 0.288463} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.376307] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Reconfigured VM instance instance-0000005e to attach disk [datastore2] e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d/e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.376916] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98317dcd-3522-4024-9ff8-895bc3902b37 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.382968] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 975.382968] env[61972]: value = "task-1389523" [ 975.382968] env[61972]: _type = "Task" [ 975.382968] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.394388] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389523, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.397393] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389522, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083093} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.397636] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.398375] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbb5020-d71f-4305-a990-6bce12016215 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.423247] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] f0565271-2276-4f18-813a-6f9338183480/f0565271-2276-4f18-813a-6f9338183480.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.423755] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f59a69d-cb2b-4761-9195-d1906a816211 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.446965] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 975.446965] env[61972]: value = "task-1389524" [ 975.446965] env[61972]: _type = "Task" [ 975.446965] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.455259] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389524, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.618457] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Updating instance_info_cache with network_info: [{"id": "3583e7ca-03b2-4200-8a2a-9394e6cec912", "address": "fa:16:3e:03:c7:bb", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3583e7ca-03", "ovs_interfaceid": "3583e7ca-03b2-4200-8a2a-9394e6cec912", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.702544] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.702930] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.703159] env[61972]: INFO nova.compute.manager [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Attaching volume 49e34489-2d91-47b5-b285-958e3c1e5401 to /dev/sdb [ 975.715349] env[61972]: DEBUG nova.objects.instance [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lazy-loading 'numa_topology' on Instance uuid 9562558a-89ba-4169-bd0a-ad31fc0c33bc {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.740119] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9def7cec-077f-4a23-8f0e-a2a6ce2b3400 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.747352] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-038672e9-7716-46f7-b02f-70b7b4fe8e6b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.761895] env[61972]: DEBUG nova.virt.block_device [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating existing volume attachment record: 9c9beb02-e7df-43db-9f63-44f89fe453e4 {{(pid=61972) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 975.863847] env[61972]: INFO nova.compute.manager [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Took 21.08 seconds to build instance. [ 975.892401] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389523, 'name': Rename_Task, 'duration_secs': 0.152897} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.892667] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 975.892903] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18b0063b-3b54-4014-92d9-836aa0f0fdb6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.898403] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 975.898403] env[61972]: value = "task-1389526" [ 975.898403] env[61972]: _type = "Task" [ 975.898403] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.907159] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389526, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.955899] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389524, 'name': ReconfigVM_Task, 'duration_secs': 0.40608} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.956212] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Reconfigured VM instance instance-00000060 to attach disk [datastore2] f0565271-2276-4f18-813a-6f9338183480/f0565271-2276-4f18-813a-6f9338183480.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.956861] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56727321-233a-46a1-8b81-9abfa99f733f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.962706] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 975.962706] env[61972]: value = "task-1389527" [ 975.962706] env[61972]: _type = "Task" [ 975.962706] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.970495] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389527, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.124223] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Releasing lock "refresh_cache-72435dc4-eae1-4606-bb32-e7e8e282d0b9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.124534] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Updated the network info_cache for instance {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 976.124877] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.125169] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.125527] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.125749] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.125970] env[61972]: DEBUG oslo_service.periodic_task [None 
req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.126164] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.126318] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 976.126478] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.217367] env[61972]: INFO nova.compute.claims [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.365618] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f331e914-ba3a-4de4-9886-8ef41afeb457 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "bf32c8b2-51b4-495a-b340-5dbabdf33137" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.595s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.408801] env[61972]: DEBUG oslo_vmware.api [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389526, 'name': PowerOnVM_Task, 'duration_secs': 0.449929} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.408983] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.409622] env[61972]: INFO nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Took 8.78 seconds to spawn the instance on the hypervisor. 
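Editor's note: the block of oslo_service.periodic_task entries above is the compute manager's periodic task loop firing: each decorated method runs on its own spacing, and _reclaim_queued_deletes returns early because reclaim_instance_interval <= 0. A minimal sketch of that pattern, assuming nothing about Nova's real method bodies:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class ManagerSketch(periodic_task.PeriodicTasks):
        # Illustrative stand-in for the ComputeManager tasks named in the log.

        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            pass  # placeholder body

        @periodic_task.periodic_task
        def _reclaim_queued_deletes(self, context):
            if CONF.reclaim_instance_interval <= 0:
                # mirrors "CONF.reclaim_instance_interval <= 0, skipping..."
                return

    if __name__ == "__main__":
        CONF([], project="example")      # initialize config without CLI args
        mgr = ManagerSketch()
        mgr.run_periodic_tasks(context=None)   # what the service timer does in Nova
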
[ 976.409622] env[61972]: DEBUG nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 976.410321] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5bae27-79d5-4fc9-ab95-0522dc017c17 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.473536] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389527, 'name': Rename_Task, 'duration_secs': 0.133574} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.473783] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 976.474054] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-396c1c77-4929-4e4c-87d2-5549196745d5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.479958] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 976.479958] env[61972]: value = "task-1389530" [ 976.479958] env[61972]: _type = "Task" [ 976.479958] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.487819] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389530, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.629380] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.848337] env[61972]: DEBUG nova.compute.manager [req-8862c9c9-0781-4209-9cba-eb5f41b7f69c req-624f98fb-8d63-446c-8f68-c1028d0a0bbb service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received event network-vif-plugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 976.848582] env[61972]: DEBUG oslo_concurrency.lockutils [req-8862c9c9-0781-4209-9cba-eb5f41b7f69c req-624f98fb-8d63-446c-8f68-c1028d0a0bbb service nova] Acquiring lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.849936] env[61972]: DEBUG oslo_concurrency.lockutils [req-8862c9c9-0781-4209-9cba-eb5f41b7f69c req-624f98fb-8d63-446c-8f68-c1028d0a0bbb service nova] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.849936] env[61972]: DEBUG oslo_concurrency.lockutils [req-8862c9c9-0781-4209-9cba-eb5f41b7f69c req-624f98fb-8d63-446c-8f68-c1028d0a0bbb service nova] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.849936] env[61972]: DEBUG nova.compute.manager [req-8862c9c9-0781-4209-9cba-eb5f41b7f69c req-624f98fb-8d63-446c-8f68-c1028d0a0bbb service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] No waiting events found dispatching network-vif-plugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 976.849936] env[61972]: WARNING nova.compute.manager [req-8862c9c9-0781-4209-9cba-eb5f41b7f69c req-624f98fb-8d63-446c-8f68-c1028d0a0bbb service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received unexpected event network-vif-plugged-89e228e1-2aac-4e05-98ee-5c29dd44f55b for instance with vm_state shelved_offloaded and task_state spawning. [ 976.927547] env[61972]: INFO nova.compute.manager [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Took 23.35 seconds to build instance. 
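Editor's note: the network-vif-plugged handling above follows Nova's external-event handshake: the compute manager registers a waiter for an event name before it expects Neutron to fire it, pop_instance_event releases that waiter under the per-instance "-events" lock, and an event with no registered waiter is logged and dropped (the WARNING above, since the shelved_offloaded instance never registered one). A simplified, hypothetical version of that bookkeeping:

    import threading
    from collections import defaultdict

    class InstanceEventsSketch:
        # Simplified stand-in for nova.compute.manager.InstanceEvents;
        # names and structure are illustrative, not Nova's real code.

        def __init__(self):
            self._lock = threading.Lock()        # plays the role of the "-events" lock
            self._waiters = defaultdict(dict)    # instance uuid -> {event name: Event}

        def prepare(self, instance_uuid, event_name):
            # Called before the operation that will trigger the event.
            with self._lock:
                ev = threading.Event()
                self._waiters[instance_uuid][event_name] = ev
                return ev

        def deliver(self, instance_uuid, event_name):
            # Called when the external event (e.g. network-vif-plugged-<port>)
            # arrives from Neutron.
            with self._lock:
                ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
            if ev is None:
                return False   # "No waiting events found dispatching ..."
            ev.set()
            return True
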
[ 976.946178] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.946178] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.946178] env[61972]: DEBUG nova.network.neutron [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.989651] env[61972]: DEBUG oslo_vmware.api [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389530, 'name': PowerOnVM_Task, 'duration_secs': 0.426673} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.989966] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.990151] env[61972]: INFO nova.compute.manager [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Took 4.71 seconds to spawn the instance on the hypervisor. 
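Editor's note: the Acquiring/Acquired lines for "refresh_cache-<uuid>" above are the standard pattern for rebuilding an instance's network info cache: every reader or writer of one instance's cache serializes on the same named oslo.concurrency lock. A minimal sketch, with fetch_nw_info and cache as hypothetical stand-ins for the Neutron query and the info-cache store:

    from oslo_concurrency import lockutils

    def refresh_instance_nw_cache(instance_uuid, fetch_nw_info, cache):
        # Serialize on the named lock the log shows ("refresh_cache-<uuid>")
        # so concurrent refreshes and the periodic _heal_instance_info_cache
        # task cannot interleave their reads and writes.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            nw_info = fetch_nw_info(instance_uuid)   # e.g. port/subnet data from Neutron
            cache[instance_uuid] = nw_info
            return nw_info
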
[ 976.990333] env[61972]: DEBUG nova.compute.manager [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 976.991080] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e201b85-be2a-4450-8f43-a6ec9e422c37 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.375923] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0c6a9a-0557-4639-894e-a5a1aee5cf53 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.383865] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617c5c8a-c74b-4ad8-b8d9-23a0142543ea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.414031] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa98de4f-38f5-494c-904c-9c1eb23503c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.421637] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22566e37-43a4-4333-980a-c2b166b5d7c7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.434844] env[61972]: DEBUG oslo_concurrency.lockutils [None req-6f1260f7-d01d-42e1-83f0-94a1ac77c4b4 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.864s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.435345] env[61972]: DEBUG nova.compute.provider_tree [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.507197] env[61972]: INFO nova.compute.manager [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Took 22.43 seconds to build instance. 
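Editor's note: the "Inventory has not changed in ProviderTree" lines refer to the resource-provider inventory spelled out a few entries later for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 (VCPU, MEMORY_MB, DISK_GB with reserved amounts and allocation ratios). Placement derives schedulable capacity as (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a small worked example using the values from this log:

    # Inventory values copied from the report-client entry later in this excerpt.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
    }

    def effective_capacity(inv):
        # Total capacity placement will hand out per resource class.
        return {rc: int((r['total'] - r['reserved']) * r['allocation_ratio'])
                for rc, r in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
    # max_unit still limits a single allocation to 16 VCPU / 65530 MB / 175 GB.
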
[ 977.738309] env[61972]: INFO nova.compute.manager [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Rebuilding instance [ 977.785391] env[61972]: DEBUG nova.compute.manager [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 977.786419] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2dce01-caad-4fad-adb8-a335f12e5594 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.801538] env[61972]: DEBUG nova.network.neutron [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [{"id": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "address": "fa:16:3e:68:43:9d", "network": {"id": "8bff1a25-9939-4436-a9bb-c54446b85c9e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-555487388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9266fa0d01664ba4a80ff4068cb9b9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e228e1-2a", "ovs_interfaceid": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.938574] env[61972]: DEBUG nova.scheduler.client.report [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 978.009775] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e8c7bee1-9257-4ed2-9716-81ed7a207dea tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "f0565271-2276-4f18-813a-6f9338183480" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.936s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.303636] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.329534] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b142edfbbef337635b2eb366fbf660a3',container_format='bare',created_at=2024-10-31T12:16:46Z,direct_url=,disk_format='vmdk',id=f2b2602a-d38e-4ffb-b305-ed7666354ac0,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1530779359-shelved',owner='9266fa0d01664ba4a80ff4068cb9b9bc',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2024-10-31T12:17:02Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 978.329824] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 978.329989] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.330216] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 978.330380] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.330533] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 978.330757] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 978.330981] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 978.331225] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 978.331400] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 978.331574] env[61972]: DEBUG nova.virt.hardware [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 978.332483] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef9ffcb-db9d-4eac-8e9a-89ce26f95942 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.340793] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33be4d7e-5e73-42e6-8dce-d64c0208a6bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.355399] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:43:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89e228e1-2aac-4e05-98ee-5c29dd44f55b', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 978.362635] env[61972]: DEBUG oslo.service.loopingcall [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.362989] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 978.363245] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75bec966-6efc-497d-ba3d-5faec395b462 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.383704] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 978.383704] env[61972]: value = "task-1389532" [ 978.383704] env[61972]: _type = "Task" [ 978.383704] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.390810] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389532, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.444633] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.234s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.446908] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 9.083s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.494209] env[61972]: INFO nova.network.neutron [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating port 96b44391-970b-458b-bb63-47288e6d18a2 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 978.630761] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "ff25c137-ba78-4807-bd64-f3075e81dd5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.631642] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "ff25c137-ba78-4807-bd64-f3075e81dd5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.803749] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Powering off the VM {{(pid=61972) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 978.804165] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-adbed57f-3692-49ff-a6a9-995b36978328 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.811099] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 978.811099] env[61972]: value = "task-1389533" [ 978.811099] env[61972]: _type = "Task" [ 978.811099] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.820054] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389533, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.874925] env[61972]: DEBUG nova.compute.manager [req-5f2fd74d-4810-4183-912a-511a402e3d53 req-cfae4c4f-4c30-40ab-aff2-6326962e7acc service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received event network-changed-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 978.875198] env[61972]: DEBUG nova.compute.manager [req-5f2fd74d-4810-4183-912a-511a402e3d53 req-cfae4c4f-4c30-40ab-aff2-6326962e7acc service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Refreshing instance network info cache due to event network-changed-89e228e1-2aac-4e05-98ee-5c29dd44f55b. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 978.875424] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f2fd74d-4810-4183-912a-511a402e3d53 req-cfae4c4f-4c30-40ab-aff2-6326962e7acc service nova] Acquiring lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.875571] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f2fd74d-4810-4183-912a-511a402e3d53 req-cfae4c4f-4c30-40ab-aff2-6326962e7acc service nova] Acquired lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.875796] env[61972]: DEBUG nova.network.neutron [req-5f2fd74d-4810-4183-912a-511a402e3d53 req-cfae4c4f-4c30-40ab-aff2-6326962e7acc service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Refreshing network info cache for port 89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.895098] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389532, 'name': CreateVM_Task, 'duration_secs': 0.409013} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.895098] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 978.895796] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.895968] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.896410] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 978.896628] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd1ad933-213d-4bae-bd4e-d4640f5be95e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.901459] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 978.901459] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52360a5c-c3cd-649b-5157-c9164286199a" [ 978.901459] env[61972]: _type = "Task" [ 978.901459] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.911980] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52360a5c-c3cd-649b-5157-c9164286199a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.952141] env[61972]: INFO nova.compute.claims [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.133548] env[61972]: DEBUG nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 979.320605] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389533, 'name': PowerOffVM_Task, 'duration_secs': 0.132396} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.320856] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.321183] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 979.321936] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c68abb-57c9-4aff-ad53-730733722f56 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.328486] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 979.328714] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee25aa48-e143-4dd8-a278-16ef888f51fc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.350879] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 979.351157] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 979.351370] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Deleting the datastore file [datastore2] f0565271-2276-4f18-813a-6f9338183480 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.351630] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e3700b5-b4ef-4a8b-b19a-38f5c1f3ecc2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.357740] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 
tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 979.357740] env[61972]: value = "task-1389535" [ 979.357740] env[61972]: _type = "Task" [ 979.357740] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.365523] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.414723] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.415067] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Processing image f2b2602a-d38e-4ffb-b305-ed7666354ac0 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.415287] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0/f2b2602a-d38e-4ffb-b305-ed7666354ac0.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.415469] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0/f2b2602a-d38e-4ffb-b305-ed7666354ac0.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.415682] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.415999] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23190f44-1718-4607-bda3-e3b663cc3870 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.438261] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.438607] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 
tempest-AttachVolumeShelveTestJSON-12225270-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.439490] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e6709dc-00bd-4407-ae31-fa6d59ae9105 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.445389] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 979.445389] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5212193a-db25-7aeb-23ed-60802ac44556" [ 979.445389] env[61972]: _type = "Task" [ 979.445389] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.461848] env[61972]: INFO nova.compute.resource_tracker [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating resource usage from migration 955996b3-7d42-4b16-94dc-c5aa7e5ad605 [ 979.465056] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Preparing fetch location {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 979.465056] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Fetch image to [datastore1] OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb/OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb.vmdk {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 979.465474] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Downloading stream optimized image f2b2602a-d38e-4ffb-b305-ed7666354ac0 to [datastore1] OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb/OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb.vmdk on the data store datastore1 as vApp {{(pid=61972) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 979.465560] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Downloading image file data f2b2602a-d38e-4ffb-b305-ed7666354ac0 to the ESX as VM named 'OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb' {{(pid=61972) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 979.549373] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 979.549373] 
env[61972]: value = "resgroup-9" [ 979.549373] env[61972]: _type = "ResourcePool" [ 979.549373] env[61972]: }. {{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 979.549650] env[61972]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-5fceb6ec-c8d9-451c-938a-b352f3f8e391 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.572669] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lease: (returnval){ [ 979.572669] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5278311c-fe7b-1408-3003-bca463834bac" [ 979.572669] env[61972]: _type = "HttpNfcLease" [ 979.572669] env[61972]: } obtained for vApp import into resource pool (val){ [ 979.572669] env[61972]: value = "resgroup-9" [ 979.572669] env[61972]: _type = "ResourcePool" [ 979.572669] env[61972]: }. {{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 979.573131] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the lease: (returnval){ [ 979.573131] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5278311c-fe7b-1408-3003-bca463834bac" [ 979.573131] env[61972]: _type = "HttpNfcLease" [ 979.573131] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 979.584053] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.584053] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5278311c-fe7b-1408-3003-bca463834bac" [ 979.584053] env[61972]: _type = "HttpNfcLease" [ 979.584053] env[61972]: } is initializing. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 979.613205] env[61972]: DEBUG nova.network.neutron [req-5f2fd74d-4810-4183-912a-511a402e3d53 req-cfae4c4f-4c30-40ab-aff2-6326962e7acc service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updated VIF entry in instance network info cache for port 89e228e1-2aac-4e05-98ee-5c29dd44f55b. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.613327] env[61972]: DEBUG nova.network.neutron [req-5f2fd74d-4810-4183-912a-511a402e3d53 req-cfae4c4f-4c30-40ab-aff2-6326962e7acc service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [{"id": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "address": "fa:16:3e:68:43:9d", "network": {"id": "8bff1a25-9939-4436-a9bb-c54446b85c9e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-555487388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9266fa0d01664ba4a80ff4068cb9b9bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e228e1-2a", "ovs_interfaceid": "89e228e1-2aac-4e05-98ee-5c29dd44f55b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.651123] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.655924] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bc975a-680f-4cbb-af1e-8cbb4de1823c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.664211] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636fab3e-7de0-41ee-b94e-04b6e5964674 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.695301] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02dac31e-b596-4bcf-a453-afe2d5d9b605 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.702480] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4134d910-a0a7-4a1a-adbb-b7003fbe6315 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.715248] env[61972]: DEBUG nova.compute.provider_tree [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.868563] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14969} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.868801] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 979.868993] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 979.869194] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.014688] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.014890] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.015090] env[61972]: DEBUG nova.network.neutron [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 980.080798] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 980.080798] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5278311c-fe7b-1408-3003-bca463834bac" [ 980.080798] env[61972]: _type = "HttpNfcLease" [ 980.080798] env[61972]: } is initializing. 
{{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 980.115947] env[61972]: DEBUG oslo_concurrency.lockutils [req-5f2fd74d-4810-4183-912a-511a402e3d53 req-cfae4c4f-4c30-40ab-aff2-6326962e7acc service nova] Releasing lock "refresh_cache-56e21cf4-4dbc-4f72-97c0-082dd689c046" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.217924] env[61972]: DEBUG nova.scheduler.client.report [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 980.311121] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Volume attach. Driver type: vmdk {{(pid=61972) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 980.311385] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294912', 'volume_id': '49e34489-2d91-47b5-b285-958e3c1e5401', 'name': 'volume-49e34489-2d91-47b5-b285-958e3c1e5401', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd2864436-05a3-421f-98fd-41df925727c6', 'attached_at': '', 'detached_at': '', 'volume_id': '49e34489-2d91-47b5-b285-958e3c1e5401', 'serial': '49e34489-2d91-47b5-b285-958e3c1e5401'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 980.312291] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d33faa-cfbc-4e21-a6d3-39f6f18ad920 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.328258] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d080a4d-0511-44df-901d-ec333a2f12b8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.351727] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] volume-49e34489-2d91-47b5-b285-958e3c1e5401/volume-49e34489-2d91-47b5-b285-958e3c1e5401.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.351918] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90e72a5b-f1a2-4797-ba33-d09604e133a2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.369718] env[61972]: DEBUG oslo_vmware.api [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 980.369718] env[61972]: value = "task-1389537" [ 980.369718] env[61972]: _type = "Task" [ 980.369718] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.380198] env[61972]: DEBUG oslo_vmware.api [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389537, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.581494] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 980.581494] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5278311c-fe7b-1408-3003-bca463834bac" [ 980.581494] env[61972]: _type = "HttpNfcLease" [ 980.581494] env[61972]: } is initializing. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 980.723068] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.276s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.723350] env[61972]: INFO nova.compute.manager [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Migrating [ 980.729621] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.305s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.729850] env[61972]: DEBUG nova.objects.instance [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'resources' on Instance uuid dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.759414] env[61972]: DEBUG nova.network.neutron [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [{"id": "96b44391-970b-458b-bb63-47288e6d18a2", "address": "fa:16:3e:01:b3:84", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96b44391-97", "ovs_interfaceid": "96b44391-970b-458b-bb63-47288e6d18a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.881631] env[61972]: DEBUG oslo_vmware.api [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389537, 'name': ReconfigVM_Task, 'duration_secs': 0.331077} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.881916] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Reconfigured VM instance instance-00000057 to attach disk [datastore1] volume-49e34489-2d91-47b5-b285-958e3c1e5401/volume-49e34489-2d91-47b5-b285-958e3c1e5401.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.886717] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a17d89d0-45cf-46f6-a3e3-1ef3c2584c80 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.901456] env[61972]: DEBUG nova.compute.manager [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received event network-vif-plugged-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 980.901678] env[61972]: DEBUG oslo_concurrency.lockutils [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] Acquiring lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.901883] env[61972]: DEBUG oslo_concurrency.lockutils [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.902107] env[61972]: DEBUG oslo_concurrency.lockutils [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.902263] env[61972]: DEBUG nova.compute.manager [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] No waiting events found dispatching network-vif-plugged-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 980.902394] env[61972]: WARNING nova.compute.manager [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received unexpected event network-vif-plugged-96b44391-970b-458b-bb63-47288e6d18a2 for instance with vm_state shelved_offloaded and task_state spawning. [ 980.902555] env[61972]: DEBUG nova.compute.manager [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received event network-changed-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 980.902832] env[61972]: DEBUG nova.compute.manager [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Refreshing instance network info cache due to event network-changed-96b44391-970b-458b-bb63-47288e6d18a2. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 980.902934] env[61972]: DEBUG oslo_concurrency.lockutils [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] Acquiring lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.904972] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 980.905210] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 980.905370] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 980.905552] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 980.905699] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.905861] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 980.906056] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 980.906221] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 980.906390] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 980.906552] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 980.906723] env[61972]: DEBUG nova.virt.hardware [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 980.907775] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab13bff5-3a3b-455f-b4e0-cb404da1b699 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.911779] env[61972]: DEBUG oslo_vmware.api [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 980.911779] env[61972]: value = "task-1389538" [ 980.911779] env[61972]: _type = "Task" [ 980.911779] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.918241] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a505431-70f2-472b-9c00-c7b59b8bc812 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.925692] env[61972]: DEBUG oslo_vmware.api [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.936012] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Instance VIF info [] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.941609] env[61972]: DEBUG oslo.service.loopingcall [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.941871] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0565271-2276-4f18-813a-6f9338183480] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.942440] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b29c5765-8b45-48cb-931e-a45a9aec4041 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.958922] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.958922] env[61972]: value = "task-1389539" [ 980.958922] env[61972]: _type = "Task" [ 980.958922] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.966307] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389539, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.081510] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 981.081510] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5278311c-fe7b-1408-3003-bca463834bac" [ 981.081510] env[61972]: _type = "HttpNfcLease" [ 981.081510] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 981.082053] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 981.082053] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5278311c-fe7b-1408-3003-bca463834bac" [ 981.082053] env[61972]: _type = "HttpNfcLease" [ 981.082053] env[61972]: }. 
{{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 981.082514] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228169b3-9932-4be7-8a43-d978c92c0858 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.089238] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d3d4f-562d-dc85-919c-548844d73539/disk-0.vmdk from lease info. {{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 981.089406] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d3d4f-562d-dc85-919c-548844d73539/disk-0.vmdk. {{(pid=61972) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 981.156490] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-75406f64-f99f-4257-b352-314af747fee2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.240600] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.240600] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.241117] env[61972]: DEBUG nova.network.neutron [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.261987] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.265510] env[61972]: DEBUG oslo_concurrency.lockutils [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] Acquired lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.265741] env[61972]: DEBUG nova.network.neutron [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a 
req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Refreshing network info cache for port 96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 981.286981] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b7ea35a14491e2bab865dee977f9f02c',container_format='bare',created_at=2024-10-31T12:16:46Z,direct_url=,disk_format='vmdk',id=5d728988-1ea5-4476-a502-ce99a53302c1,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1794132144-shelved',owner='3fd99c56733940dda5267401c71b9e5d',properties=ImageMetaProps,protected=,size=31665664,status='active',tags=,updated_at=2024-10-31T12:17:05Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 981.287257] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 981.287418] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.287635] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 981.287748] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.287894] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 981.288118] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 981.288281] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 
tempest-ServersNegativeTestJSON-1832544787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 981.288451] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 981.288615] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 981.288856] env[61972]: DEBUG nova.virt.hardware [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 981.290184] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c259a8-55e6-46ef-8841-26bf11269933 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.301988] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aea6742-e8f6-4401-8720-8eb5023f93e9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.317326] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:b3:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96b44391-970b-458b-bb63-47288e6d18a2', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.324471] env[61972]: DEBUG oslo.service.loopingcall [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 981.326851] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 981.327266] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0762fef-d55f-4741-8d43-c713d5336cf2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.347057] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 981.347057] env[61972]: value = "task-1389540" [ 981.347057] env[61972]: _type = "Task" [ 981.347057] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.355362] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389540, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.423613] env[61972]: DEBUG oslo_vmware.api [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389538, 'name': ReconfigVM_Task, 'duration_secs': 0.133191} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.426779] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294912', 'volume_id': '49e34489-2d91-47b5-b285-958e3c1e5401', 'name': 'volume-49e34489-2d91-47b5-b285-958e3c1e5401', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd2864436-05a3-421f-98fd-41df925727c6', 'attached_at': '', 'detached_at': '', 'volume_id': '49e34489-2d91-47b5-b285-958e3c1e5401', 'serial': '49e34489-2d91-47b5-b285-958e3c1e5401'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 981.444048] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812a2f72-5204-4d72-b9d2-d3f53180e89f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.453196] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297f5e5d-fae7-4ed5-85d7-c57751730377 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.493728] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae36cf3-8057-43b0-84df-bc164f285b13 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.499675] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389539, 'name': CreateVM_Task, 'duration_secs': 0.299394} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.500276] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0565271-2276-4f18-813a-6f9338183480] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 981.500775] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.501084] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.501312] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 981.504874] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4c70527-e79d-4ee3-b94c-01480ca53822 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.507530] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6155107a-7992-4048-9f8c-c3d0a470dc74 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.516314] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 981.516314] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b8456f-f89f-ed51-1a41-ee978b76825a" [ 981.516314] env[61972]: _type = "Task" [ 981.516314] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.527848] env[61972]: DEBUG nova.compute.provider_tree [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.541115] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b8456f-f89f-ed51-1a41-ee978b76825a, 'name': SearchDatastore_Task, 'duration_secs': 0.011593} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.544044] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.544345] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.544617] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.544769] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.544962] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.545493] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-485c4dd4-e6e4-4feb-b206-0037fd9e3a6b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.553567] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.553789] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.556225] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7446a89a-9745-4f03-a915-06dde9feb45c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.561595] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 981.561595] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5214c3bd-9485-67d1-575f-c529c3b1a9f2" [ 981.561595] env[61972]: _type = "Task" [ 981.561595] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.571672] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5214c3bd-9485-67d1-575f-c529c3b1a9f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.858895] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389540, 'name': CreateVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.992844] env[61972]: DEBUG nova.network.neutron [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updated VIF entry in instance network info cache for port 96b44391-970b-458b-bb63-47288e6d18a2. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 981.993473] env[61972]: DEBUG nova.network.neutron [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [{"id": "96b44391-970b-458b-bb63-47288e6d18a2", "address": "fa:16:3e:01:b3:84", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96b44391-97", "ovs_interfaceid": "96b44391-970b-458b-bb63-47288e6d18a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.037625] env[61972]: DEBUG nova.scheduler.client.report [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 982.074179] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5214c3bd-9485-67d1-575f-c529c3b1a9f2, 'name': SearchDatastore_Task, 'duration_secs': 0.00889} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.074958] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64a61bb6-136a-4d26-9e28-e6d97cef3a0e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.080330] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 982.080330] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5265d99e-f35d-f4da-4cda-9a3c2ddc3fc0" [ 982.080330] env[61972]: _type = "Task" [ 982.080330] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.089449] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5265d99e-f35d-f4da-4cda-9a3c2ddc3fc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.310030] env[61972]: DEBUG nova.network.neutron [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance_info_cache with network_info: [{"id": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "address": "fa:16:3e:2d:f5:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78188d45-b4", "ovs_interfaceid": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.358585] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389540, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.468976] env[61972]: DEBUG nova.objects.instance [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.496486] env[61972]: DEBUG oslo_concurrency.lockutils [req-ccd3012e-055b-4ca4-b6e9-0eb9855ebe4a req-61de527e-c648-4b27-9bb6-054acf4c5aa1 service nova] Releasing lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.542904] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.813s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.547631] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.918s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.547862] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.548067] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 982.548446] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.897s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.550165] env[61972]: INFO nova.compute.claims [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.554034] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4654229a-5bbc-4f19-bf08-c37ddce82650 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.562987] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c59bd2-7d42-44f1-bbd5-d153a3cbe0bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.568445] env[61972]: INFO nova.scheduler.client.report 
[None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted allocations for instance dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7 [ 982.593081] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d4c776-3697-45c3-b116-4edd3c184742 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.605780] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5265d99e-f35d-f4da-4cda-9a3c2ddc3fc0, 'name': SearchDatastore_Task, 'duration_secs': 0.008782} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.607850] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.608121] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] f0565271-2276-4f18-813a-6f9338183480/f0565271-2276-4f18-813a-6f9338183480.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 982.611046] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55623ec5-159d-4d20-b038-69ccf34e3fd9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.613852] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6cff27-8808-486a-852e-d51140d01366 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.647230] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180399MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 982.647491] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.649544] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 982.649544] env[61972]: value = "task-1389541" [ 982.649544] env[61972]: _type = "Task" [ 982.649544] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.658285] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.718422] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Completed reading data from the image iterator. {{(pid=61972) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 982.718731] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d3d4f-562d-dc85-919c-548844d73539/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 982.720467] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6f0e18-8972-49a3-b1cb-d6b06f7dc538 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.727262] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d3d4f-562d-dc85-919c-548844d73539/disk-0.vmdk is in state: ready. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 982.727480] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d3d4f-562d-dc85-919c-548844d73539/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 982.727759] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-0b68a9f4-e87b-4bf5-a4f7-d6312ee1a3a6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.813264] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.858835] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389540, 'name': CreateVM_Task, 'duration_secs': 1.350155} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.859062] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 982.859761] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.859934] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.860359] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 982.860621] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47cce4c6-6c7e-4964-8b50-22dfd8a5a4db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.865759] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 982.865759] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]525ec8e5-31af-3818-ea96-8104ee8868d7" [ 982.865759] env[61972]: _type = "Task" [ 982.865759] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.874449] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525ec8e5-31af-3818-ea96-8104ee8868d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.901193] env[61972]: DEBUG oslo_vmware.rw_handles [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d3d4f-562d-dc85-919c-548844d73539/disk-0.vmdk. 
{{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 982.901265] env[61972]: INFO nova.virt.vmwareapi.images [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Downloaded image file data f2b2602a-d38e-4ffb-b305-ed7666354ac0 [ 982.902166] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff88c484-e45d-4267-b492-202e382cc392 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.921664] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-796fafbd-5216-447c-93db-007e36b76e6d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.949340] env[61972]: INFO nova.virt.vmwareapi.images [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] The imported VM was unregistered [ 982.952178] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Caching image {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 982.952424] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Creating directory with path [datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0 {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.952840] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cc40502-421a-476b-b3ed-081f73a79e93 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.974589] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ad122383-d3e6-41ed-bc9d-aabae5182804 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.271s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.979989] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Created directory with path [datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0 {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.980320] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb/OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb.vmdk to [datastore1] 
devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0/f2b2602a-d38e-4ffb-b305-ed7666354ac0.vmdk. {{(pid=61972) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 982.980640] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-2bc3e8e2-aa75-4680-9012-0ec65b896d7c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.989409] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 982.989409] env[61972]: value = "task-1389543" [ 982.989409] env[61972]: _type = "Task" [ 982.989409] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.999426] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389543, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.079578] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.079837] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.080105] env[61972]: DEBUG nova.compute.manager [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 983.080631] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4766f0cb-92eb-46bf-abd4-4960d3e5b5e9 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.576s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.082205] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1be16e-0f86-40e6-80f3-b4f23cd0f8c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.089407] env[61972]: DEBUG nova.compute.manager [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Stopping instance; current vm_state: active, current 
task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61972) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 983.089965] env[61972]: DEBUG nova.objects.instance [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.159489] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471958} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.159775] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] f0565271-2276-4f18-813a-6f9338183480/f0565271-2276-4f18-813a-6f9338183480.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.159980] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.160284] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-899ec202-8123-4ad9-aa2f-c74131bf818b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.167106] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 983.167106] env[61972]: value = "task-1389544" [ 983.167106] env[61972]: _type = "Task" [ 983.167106] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.174633] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389544, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.377056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.377233] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Processing image 5d728988-1ea5-4476-a502-ce99a53302c1 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.377368] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1/5d728988-1ea5-4476-a502-ce99a53302c1.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.377522] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1/5d728988-1ea5-4476-a502-ce99a53302c1.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.377704] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.377968] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a16ed2cb-cdef-48ad-a9ea-383ff78decb7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.387349] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.387549] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.388425] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de1f4326-ae27-437d-a1d9-cb89ec119370 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.394493] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 983.394493] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b1e71e-c612-5f81-48d1-343ab0188b20" [ 983.394493] env[61972]: _type = "Task" [ 983.394493] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.404113] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b1e71e-c612-5f81-48d1-343ab0188b20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.499953] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389543, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.679967] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389544, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062788} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.680271] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.681112] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf429b6-e3a2-4aef-97bc-d0f77b5f0003 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.708628] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] f0565271-2276-4f18-813a-6f9338183480/f0565271-2276-4f18-813a-6f9338183480.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.711695] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4abbb4b-1aff-4998-a4e6-0c73ba70ddcd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.735024] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 983.735024] env[61972]: value = "task-1389545" [ 983.735024] env[61972]: _type = "Task" [ 983.735024] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.744345] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389545, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.792620] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cfc8f4-9505-400c-bfe6-5be69bd6aee6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.800297] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4839eb7a-16ad-4fd3-a7a6-d66a63d7ab1e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.838585] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b28035-308f-402a-94d5-3bdc734de58d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.847325] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caac9cde-1606-4934-9546-d839d2f8be66 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.862455] env[61972]: DEBUG nova.compute.provider_tree [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.905719] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Preparing fetch location {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 983.905984] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Fetch image to [datastore2] OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584/OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584.vmdk {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 983.906192] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Downloading stream optimized image 5d728988-1ea5-4476-a502-ce99a53302c1 to [datastore2] OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584/OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584.vmdk on the data store datastore2 as vApp {{(pid=61972) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 983.906370] env[61972]: DEBUG nova.virt.vmwareapi.images [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Downloading image file data 5d728988-1ea5-4476-a502-ce99a53302c1 to the ESX as VM named 'OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584' {{(pid=61972) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 983.988263] env[61972]: DEBUG oslo_vmware.rw_handles [None 
req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 983.988263] env[61972]: value = "resgroup-9" [ 983.988263] env[61972]: _type = "ResourcePool" [ 983.988263] env[61972]: }. {{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 983.988614] env[61972]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-8a2c0d2d-ecca-426c-9413-30138fdc2eb3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.019310] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389543, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.019648] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lease: (returnval){ [ 984.019648] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208dffb-ad8e-ac6a-ffd3-71306a816095" [ 984.019648] env[61972]: _type = "HttpNfcLease" [ 984.019648] env[61972]: } obtained for vApp import into resource pool (val){ [ 984.019648] env[61972]: value = "resgroup-9" [ 984.019648] env[61972]: _type = "ResourcePool" [ 984.019648] env[61972]: }. {{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 984.019898] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the lease: (returnval){ [ 984.019898] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208dffb-ad8e-ac6a-ffd3-71306a816095" [ 984.019898] env[61972]: _type = "HttpNfcLease" [ 984.019898] env[61972]: } to be ready. {{(pid=61972) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 984.026743] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 984.026743] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208dffb-ad8e-ac6a-ffd3-71306a816095" [ 984.026743] env[61972]: _type = "HttpNfcLease" [ 984.026743] env[61972]: } is initializing. 
{{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 984.098916] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 984.099333] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a58403d-6c4a-48d3-adbf-90e2ff5a7369 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.107853] env[61972]: DEBUG oslo_vmware.api [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 984.107853] env[61972]: value = "task-1389547" [ 984.107853] env[61972]: _type = "Task" [ 984.107853] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.119135] env[61972]: DEBUG oslo_vmware.api [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.243821] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389545, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.342449] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258baa75-cd82-4677-af0a-4297c251612d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.362970] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance '8745c578-de46-4ade-bf08-f0bc9bb300d8' progress to 0 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 984.367869] env[61972]: DEBUG nova.scheduler.client.report [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 984.516094] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389543, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.527824] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 984.527824] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208dffb-ad8e-ac6a-ffd3-71306a816095" [ 984.527824] env[61972]: _type = "HttpNfcLease" [ 984.527824] env[61972]: } is initializing. 
{{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 984.552381] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "91db79db-d83c-4473-87c8-9dff2f042500" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.552381] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "91db79db-d83c-4473-87c8-9dff2f042500" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.618172] env[61972]: DEBUG oslo_vmware.api [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.744178] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389545, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.873528] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 984.874305] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.326s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.874786] env[61972]: DEBUG nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 984.877476] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2aac2bb-4b90-41d2-a0c4-697fc158f887 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.879335] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.232s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.887293] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 984.887293] env[61972]: value = "task-1389548" [ 984.887293] env[61972]: _type = "Task" [ 984.887293] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.897600] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.017956] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389543, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.029039] env[61972]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.029039] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208dffb-ad8e-ac6a-ffd3-71306a816095" [ 985.029039] env[61972]: _type = "HttpNfcLease" [ 985.029039] env[61972]: } is ready. {{(pid=61972) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 985.029438] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 985.029438] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5208dffb-ad8e-ac6a-ffd3-71306a816095" [ 985.029438] env[61972]: _type = "HttpNfcLease" [ 985.029438] env[61972]: }. {{(pid=61972) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 985.030230] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de6e72e-3072-4832-b569-681fe117cbcc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.040330] env[61972]: DEBUG nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 985.043129] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524f2ba9-2f0c-7084-a3a6-22dbd19f2d4b/disk-0.vmdk from lease info. {{(pid=61972) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 985.043319] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating HTTP connection to write to file with size = 31665664 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524f2ba9-2f0c-7084-a3a6-22dbd19f2d4b/disk-0.vmdk. {{(pid=61972) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 985.112027] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e5ca1d87-64cd-475d-bb8b-961c2e9f77c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.127312] env[61972]: DEBUG oslo_vmware.api [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.249385] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389545, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.383169] env[61972]: DEBUG nova.compute.utils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 985.389408] env[61972]: DEBUG nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 985.389599] env[61972]: DEBUG nova.network.neutron [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 985.404029] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389548, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.433174] env[61972]: DEBUG nova.policy [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa1cef9829b45f4bbe90e9882b8f8c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57829399c5741c08c30bb60163148b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 985.517566] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389543, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.565747] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.622742] env[61972]: DEBUG oslo_vmware.api [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389547, 'name': PowerOffVM_Task, 'duration_secs': 1.184685} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.623060] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.623367] env[61972]: DEBUG nova.compute.manager [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 985.624176] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a76116-8fd5-4b7e-980d-8e5e66bfd6f9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.745132] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389545, 'name': ReconfigVM_Task, 'duration_secs': 1.578275} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.745442] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Reconfigured VM instance instance-00000060 to attach disk [datastore2] f0565271-2276-4f18-813a-6f9338183480/f0565271-2276-4f18-813a-6f9338183480.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.746070] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17bd1192-ab7e-40cd-93a6-f7b4fe144cc7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.751769] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 985.751769] env[61972]: value = "task-1389549" [ 985.751769] env[61972]: _type = "Task" [ 985.751769] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.761306] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389549, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.861788] env[61972]: DEBUG nova.network.neutron [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Successfully created port: 3cb5d7b0-0653-4fb7-9262-abe4ebb1df74 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 985.890306] env[61972]: DEBUG nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 985.895552] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Applying migration context for instance 8745c578-de46-4ade-bf08-f0bc9bb300d8 as it has an incoming, in-progress migration 955996b3-7d42-4b16-94dc-c5aa7e5ad605. Migration status is migrating {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 985.897049] env[61972]: INFO nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating resource usage from migration 955996b3-7d42-4b16-94dc-c5aa7e5ad605 [ 985.914833] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389548, 'name': PowerOffVM_Task, 'duration_secs': 0.743537} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.917436] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.917705] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance '8745c578-de46-4ade-bf08-f0bc9bb300d8' progress to 17 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 985.922934] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 72435dc4-eae1-4606-bb32-e7e8e282d0b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.923111] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance d2864436-05a3-421f-98fd-41df925727c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.923261] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance f71d004b-5343-4ef3-8f37-8ff544c335a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.923391] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.923506] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance bf32c8b2-51b4-495a-b340-5dbabdf33137 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.923620] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 56e21cf4-4dbc-4f72-97c0-082dd689c046 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.923733] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance f0565271-2276-4f18-813a-6f9338183480 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.923844] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 9562558a-89ba-4169-bd0a-ad31fc0c33bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.923958] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Migration 955996b3-7d42-4b16-94dc-c5aa7e5ad605 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 985.924084] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 8745c578-de46-4ade-bf08-f0bc9bb300d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.924201] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance ff25c137-ba78-4807-bd64-f3075e81dd5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 986.021490] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389543, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.625593} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.021771] env[61972]: INFO nova.virt.vmwareapi.ds_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb/OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb.vmdk to [datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0/f2b2602a-d38e-4ffb-b305-ed7666354ac0.vmdk. 
[ 986.021964] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Cleaning up location [datastore1] OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 986.022151] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_5a84d26b-1640-4798-98cd-d471919857eb {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 986.022413] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-700a6c0d-02f3-4dcd-9015-44366af89d56 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.028758] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 986.028758] env[61972]: value = "task-1389550" [ 986.028758] env[61972]: _type = "Task" [ 986.028758] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.038547] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389550, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.138681] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a76e5fb-6eba-469f-b6a0-8b2dbb279695 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.057s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.263188] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389549, 'name': Rename_Task, 'duration_secs': 0.204914} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.265069] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 986.265376] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7b9cc73-7463-4b77-bd5d-27a001e81bee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.271911] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 986.271911] env[61972]: value = "task-1389551" [ 986.271911] env[61972]: _type = "Task" [ 986.271911] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.283671] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.431104] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 986.431307] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 986.431488] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.432293] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 986.432293] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 
tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.432293] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 986.432293] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 986.432483] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 986.432526] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 986.432691] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 986.432871] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 986.439056] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 91db79db-d83c-4473-87c8-9dff2f042500 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 986.439350] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 986.439498] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 986.442302] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef13ed7c-550d-4f1a-968f-5f7cf8d5dd70 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.459265] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 986.459265] env[61972]: value = "task-1389552" [ 986.459265] env[61972]: _type = "Task" [ 986.459265] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.470204] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389552, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.538212] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041652} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.540914] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.541155] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0/f2b2602a-d38e-4ffb-b305-ed7666354ac0.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.541418] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0/f2b2602a-d38e-4ffb-b305-ed7666354ac0.vmdk to [datastore1] 56e21cf4-4dbc-4f72-97c0-082dd689c046/56e21cf4-4dbc-4f72-97c0-082dd689c046.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.543780] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80331873-8e1f-44a9-9811-2f3727cc84f6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.551337] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 986.551337] env[61972]: value = "task-1389553" [ 986.551337] env[61972]: _type = "Task" [ 986.551337] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.563664] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389553, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.620531] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e89307-5397-4fff-871a-5b6461ab4120 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.629330] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e12274-a012-4f3d-a0d8-d422cd2ecd88 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.668526] env[61972]: DEBUG nova.objects.instance [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.677807] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4681a774-ca4c-4fd9-b946-351f1029386b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.686381] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84617cb7-8c84-48f1-9e39-fedc23b4b173 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.703414] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.721226] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Completed reading data from the image iterator. {{(pid=61972) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 986.721486] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524f2ba9-2f0c-7084-a3a6-22dbd19f2d4b/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 986.722474] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e86877-87f5-4b99-ac65-19f13686e5bb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.729850] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524f2ba9-2f0c-7084-a3a6-22dbd19f2d4b/disk-0.vmdk is in state: ready. 
{{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 986.730079] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524f2ba9-2f0c-7084-a3a6-22dbd19f2d4b/disk-0.vmdk. {{(pid=61972) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 986.730369] env[61972]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b5e0ac23-0ce2-4ab5-8c79-bc3eb993a8bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.782425] env[61972]: DEBUG oslo_vmware.api [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389551, 'name': PowerOnVM_Task, 'duration_secs': 0.46022} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.782714] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.782924] env[61972]: DEBUG nova.compute.manager [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 986.783825] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4135707f-75a9-4d51-932c-83e3d476128e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.911041] env[61972]: DEBUG nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 986.942131] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 986.942455] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 986.942641] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.942833] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 986.942999] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.943204] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 986.943440] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 986.943606] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 986.943775] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 
tempest-ServersTestJSON-1214410209-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 986.943942] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 986.944132] env[61972]: DEBUG nova.virt.hardware [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 986.945287] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731767ce-34cc-4350-b0e9-7705038a55b8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.954408] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4c422f-3ebc-41e3-90f0-39a52b541ece {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.977752] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389552, 'name': ReconfigVM_Task, 'duration_secs': 0.16622} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.978147] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance '8745c578-de46-4ade-bf08-f0bc9bb300d8' progress to 33 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 987.037726] env[61972]: DEBUG oslo_vmware.rw_handles [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524f2ba9-2f0c-7084-a3a6-22dbd19f2d4b/disk-0.vmdk. 
{{(pid=61972) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 987.037943] env[61972]: INFO nova.virt.vmwareapi.images [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Downloaded image file data 5d728988-1ea5-4476-a502-ce99a53302c1 [ 987.038845] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a14b32-bed0-4367-b327-59b02aacb583 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.062482] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b51680a6-24df-46d2-8d72-53edb82db1eb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.072413] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389553, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.110027] env[61972]: INFO nova.virt.vmwareapi.images [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] The imported VM was unregistered [ 987.112484] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Caching image {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 987.112734] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Creating directory with path [datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1 {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.113157] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82840bd5-ccea-476b-823e-d150aab40d92 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.126597] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Created directory with path [datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1 {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.126915] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584/OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584.vmdk to [datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1/5d728988-1ea5-4476-a502-ce99a53302c1.vmdk. 
{{(pid=61972) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 987.127295] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bc9c57d7-1a34-4af3-8956-93c1ddd9a6a4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.134692] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 987.134692] env[61972]: value = "task-1389555" [ 987.134692] env[61972]: _type = "Task" [ 987.134692] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.144698] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389555, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.175210] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.175432] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.175686] env[61972]: DEBUG nova.network.neutron [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.176054] env[61972]: DEBUG nova.objects.instance [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'info_cache' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.207663] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 987.301439] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "compute_resources" 
by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.401600] env[61972]: DEBUG nova.compute.manager [req-f594dbb7-b9ee-4855-b5f9-6de14c64d23a req-4b00a5c3-611c-4fca-958e-ef6bade6393f service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Received event network-vif-plugged-3cb5d7b0-0653-4fb7-9262-abe4ebb1df74 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 987.401784] env[61972]: DEBUG oslo_concurrency.lockutils [req-f594dbb7-b9ee-4855-b5f9-6de14c64d23a req-4b00a5c3-611c-4fca-958e-ef6bade6393f service nova] Acquiring lock "ff25c137-ba78-4807-bd64-f3075e81dd5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.402017] env[61972]: DEBUG oslo_concurrency.lockutils [req-f594dbb7-b9ee-4855-b5f9-6de14c64d23a req-4b00a5c3-611c-4fca-958e-ef6bade6393f service nova] Lock "ff25c137-ba78-4807-bd64-f3075e81dd5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.402301] env[61972]: DEBUG oslo_concurrency.lockutils [req-f594dbb7-b9ee-4855-b5f9-6de14c64d23a req-4b00a5c3-611c-4fca-958e-ef6bade6393f service nova] Lock "ff25c137-ba78-4807-bd64-f3075e81dd5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.402427] env[61972]: DEBUG nova.compute.manager [req-f594dbb7-b9ee-4855-b5f9-6de14c64d23a req-4b00a5c3-611c-4fca-958e-ef6bade6393f service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] No waiting events found dispatching network-vif-plugged-3cb5d7b0-0653-4fb7-9262-abe4ebb1df74 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.402627] env[61972]: WARNING nova.compute.manager [req-f594dbb7-b9ee-4855-b5f9-6de14c64d23a req-4b00a5c3-611c-4fca-958e-ef6bade6393f service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Received unexpected event network-vif-plugged-3cb5d7b0-0653-4fb7-9262-abe4ebb1df74 for instance with vm_state building and task_state spawning. 
[ 987.484807] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 987.485095] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 987.485313] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.485583] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 987.485776] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.485934] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 987.486206] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 987.486406] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 987.486864] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 987.486864] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 987.487159] env[61972]: DEBUG nova.virt.hardware [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 987.495202] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 987.496040] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d67e7689-4057-49b7-9839-b1e815211fa2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.515327] env[61972]: DEBUG nova.network.neutron [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Successfully updated port: 3cb5d7b0-0653-4fb7-9262-abe4ebb1df74 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 987.523740] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 987.523740] env[61972]: value = "task-1389556" [ 987.523740] env[61972]: _type = "Task" [ 987.523740] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.537508] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389556, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.571571] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389553, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.647881] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389555, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.682782] env[61972]: DEBUG nova.objects.base [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61972) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 987.684391] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "f0565271-2276-4f18-813a-6f9338183480" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.684643] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "f0565271-2276-4f18-813a-6f9338183480" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.684859] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "f0565271-2276-4f18-813a-6f9338183480-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.685154] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "f0565271-2276-4f18-813a-6f9338183480-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.685338] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "f0565271-2276-4f18-813a-6f9338183480-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.688312] env[61972]: INFO nova.compute.manager [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Terminating instance [ 987.713824] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 987.713824] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.833s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.713824] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.147s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.715164] env[61972]: INFO nova.compute.claims [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.018562] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "refresh_cache-ff25c137-ba78-4807-bd64-f3075e81dd5d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.018825] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "refresh_cache-ff25c137-ba78-4807-bd64-f3075e81dd5d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.018953] env[61972]: DEBUG nova.network.neutron [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.038048] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.073110] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389553, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.148852] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389555, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.192546] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "refresh_cache-f0565271-2276-4f18-813a-6f9338183480" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.192827] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired lock "refresh_cache-f0565271-2276-4f18-813a-6f9338183480" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.193436] env[61972]: DEBUG nova.network.neutron [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.518391] env[61972]: DEBUG nova.network.neutron [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating instance_info_cache with network_info: [{"id": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "address": "fa:16:3e:5b:7e:e2", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98807bc5-c5", "ovs_interfaceid": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.541904] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389556, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.572562] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389553, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.573666] env[61972]: DEBUG nova.network.neutron [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 988.648665] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389555, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.715046] env[61972]: DEBUG nova.network.neutron [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 988.731464] env[61972]: DEBUG nova.network.neutron [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Updating instance_info_cache with network_info: [{"id": "3cb5d7b0-0653-4fb7-9262-abe4ebb1df74", "address": "fa:16:3e:89:d0:df", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb5d7b0-06", "ovs_interfaceid": "3cb5d7b0-0653-4fb7-9262-abe4ebb1df74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.785501] env[61972]: DEBUG nova.network.neutron [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.925877] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e518f8da-58b0-4295-8e1e-fc56753bfe8c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.936984] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1c6c8382-29c0-483b-8a75-4fa2e399f22a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.972545] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cb52b9-bc0c-4973-b491-748a69fd59ab {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.982678] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc22d41e-7315-4c81-80e2-a5273f02bd3a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.000237] env[61972]: DEBUG nova.compute.provider_tree [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.021473] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Releasing lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.040826] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389556, 'name': ReconfigVM_Task, 'duration_secs': 1.043902} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.041230] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 989.042107] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5842eec-e261-4fc0-8553-11d5e114a665 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.068983] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 8745c578-de46-4ade-bf08-f0bc9bb300d8/8745c578-de46-4ade-bf08-f0bc9bb300d8.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.069478] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57d9d52e-52cd-46df-b1ce-2cb110de9dfe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.092049] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': 
task-1389553, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.355492} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.092337] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2b2602a-d38e-4ffb-b305-ed7666354ac0/f2b2602a-d38e-4ffb-b305-ed7666354ac0.vmdk to [datastore1] 56e21cf4-4dbc-4f72-97c0-082dd689c046/56e21cf4-4dbc-4f72-97c0-082dd689c046.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.093198] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899acbb7-2246-44ca-88d6-5f98ed689e99 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.097088] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 989.097088] env[61972]: value = "task-1389557" [ 989.097088] env[61972]: _type = "Task" [ 989.097088] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.118733] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 56e21cf4-4dbc-4f72-97c0-082dd689c046/56e21cf4-4dbc-4f72-97c0-082dd689c046.vmdk or device None with type streamOptimized {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.119988] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7d7fd85-2616-46de-8a58-208f68cdef1a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.140073] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389557, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.147628] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 989.147628] env[61972]: value = "task-1389558" [ 989.147628] env[61972]: _type = "Task" [ 989.147628] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.150865] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389555, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.159250] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389558, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.238947] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "refresh_cache-ff25c137-ba78-4807-bd64-f3075e81dd5d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.239340] env[61972]: DEBUG nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Instance network_info: |[{"id": "3cb5d7b0-0653-4fb7-9262-abe4ebb1df74", "address": "fa:16:3e:89:d0:df", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb5d7b0-06", "ovs_interfaceid": "3cb5d7b0-0653-4fb7-9262-abe4ebb1df74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 989.239859] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:d0:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cb5d7b0-0653-4fb7-9262-abe4ebb1df74', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.247636] env[61972]: DEBUG oslo.service.loopingcall [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 989.247885] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.248143] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a41633f-dfea-4289-a150-ae886cd6a122 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.268611] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.268611] env[61972]: value = "task-1389559" [ 989.268611] env[61972]: _type = "Task" [ 989.268611] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.276780] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389559, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.288532] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Releasing lock "refresh_cache-f0565271-2276-4f18-813a-6f9338183480" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.288996] env[61972]: DEBUG nova.compute.manager [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 989.289212] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.290406] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84241b9b-a9df-42e5-88a6-baed0fddaae5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.298438] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.298728] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b40229b8-e9f2-43af-b6ce-44cb649409d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.306219] env[61972]: DEBUG oslo_vmware.api [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 989.306219] env[61972]: value = "task-1389560" [ 989.306219] env[61972]: _type = "Task" [ 989.306219] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.314918] env[61972]: DEBUG oslo_vmware.api [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389560, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.430947] env[61972]: DEBUG nova.compute.manager [req-f8e5f7e0-6c92-4b06-b73b-5f81aaa70379 req-eddf2470-7121-4251-8437-0f7cd6ef450c service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Received event network-changed-3cb5d7b0-0653-4fb7-9262-abe4ebb1df74 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 989.431244] env[61972]: DEBUG nova.compute.manager [req-f8e5f7e0-6c92-4b06-b73b-5f81aaa70379 req-eddf2470-7121-4251-8437-0f7cd6ef450c service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Refreshing instance network info cache due to event network-changed-3cb5d7b0-0653-4fb7-9262-abe4ebb1df74. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 989.431604] env[61972]: DEBUG oslo_concurrency.lockutils [req-f8e5f7e0-6c92-4b06-b73b-5f81aaa70379 req-eddf2470-7121-4251-8437-0f7cd6ef450c service nova] Acquiring lock "refresh_cache-ff25c137-ba78-4807-bd64-f3075e81dd5d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.431770] env[61972]: DEBUG oslo_concurrency.lockutils [req-f8e5f7e0-6c92-4b06-b73b-5f81aaa70379 req-eddf2470-7121-4251-8437-0f7cd6ef450c service nova] Acquired lock "refresh_cache-ff25c137-ba78-4807-bd64-f3075e81dd5d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.432050] env[61972]: DEBUG nova.network.neutron [req-f8e5f7e0-6c92-4b06-b73b-5f81aaa70379 req-eddf2470-7121-4251-8437-0f7cd6ef450c service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Refreshing network info cache for port 3cb5d7b0-0653-4fb7-9262-abe4ebb1df74 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.503652] env[61972]: DEBUG nova.scheduler.client.report [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 989.607421] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389557, 'name': ReconfigVM_Task, 'duration_secs': 0.399058} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.607691] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 8745c578-de46-4ade-bf08-f0bc9bb300d8/8745c578-de46-4ade-bf08-f0bc9bb300d8.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.607953] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance '8745c578-de46-4ade-bf08-f0bc9bb300d8' progress to 50 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 989.649713] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389555, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.659350] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389558, 'name': ReconfigVM_Task, 'duration_secs': 0.397692} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.659663] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 56e21cf4-4dbc-4f72-97c0-082dd689c046/56e21cf4-4dbc-4f72-97c0-082dd689c046.vmdk or device None with type streamOptimized {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.660325] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37c5dbca-29f6-43f2-9a0b-19cc5682ffa9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.666646] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 989.666646] env[61972]: value = "task-1389561" [ 989.666646] env[61972]: _type = "Task" [ 989.666646] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.675070] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389561, 'name': Rename_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.778783] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389559, 'name': CreateVM_Task} progress is 25%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.815010] env[61972]: DEBUG oslo_vmware.api [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389560, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.009863] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.010604] env[61972]: DEBUG nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 990.013456] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.712s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.013995] env[61972]: DEBUG nova.objects.instance [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61972) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 990.028539] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.030067] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb848a19-bd6c-45b5-bf92-6b84a7be4685 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.037085] env[61972]: DEBUG oslo_vmware.api [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 990.037085] env[61972]: value = "task-1389562" [ 990.037085] env[61972]: _type = "Task" [ 990.037085] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.047474] env[61972]: DEBUG oslo_vmware.api [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389562, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.115987] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d08202b-d937-4994-84b6-ab800d1c76ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.135081] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bdbbc4-6dcb-4aa9-8f06-640499215fce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.152206] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance '8745c578-de46-4ade-bf08-f0bc9bb300d8' progress to 67 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.156135] env[61972]: DEBUG nova.network.neutron [req-f8e5f7e0-6c92-4b06-b73b-5f81aaa70379 req-eddf2470-7121-4251-8437-0f7cd6ef450c service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Updated VIF entry in instance network info cache for port 3cb5d7b0-0653-4fb7-9262-abe4ebb1df74. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.156458] env[61972]: DEBUG nova.network.neutron [req-f8e5f7e0-6c92-4b06-b73b-5f81aaa70379 req-eddf2470-7121-4251-8437-0f7cd6ef450c service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Updating instance_info_cache with network_info: [{"id": "3cb5d7b0-0653-4fb7-9262-abe4ebb1df74", "address": "fa:16:3e:89:d0:df", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb5d7b0-06", "ovs_interfaceid": "3cb5d7b0-0653-4fb7-9262-abe4ebb1df74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.165102] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389555, 'name': 
MoveVirtualDisk_Task, 'duration_secs': 2.622027} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.165954] env[61972]: INFO nova.virt.vmwareapi.ds_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584/OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584.vmdk to [datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1/5d728988-1ea5-4476-a502-ce99a53302c1.vmdk. [ 990.166100] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Cleaning up location [datastore2] OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 990.166272] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_e8fcc957-e577-4c4e-853f-894d0ad4f584 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.166513] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95ed7fa6-bb39-490f-97ad-2c9934e84352 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.177727] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 990.177727] env[61972]: value = "task-1389563" [ 990.177727] env[61972]: _type = "Task" [ 990.177727] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.180861] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389561, 'name': Rename_Task, 'duration_secs': 0.155152} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.184292] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.184560] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c947144-806d-485c-a457-3921e5601174 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.191271] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389563, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.192912] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 990.192912] env[61972]: value = "task-1389564" [ 990.192912] env[61972]: _type = "Task" [ 990.192912] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.200342] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.279575] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389559, 'name': CreateVM_Task, 'duration_secs': 0.851019} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.279766] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.280550] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.280717] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.281065] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 990.281341] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2516d532-b786-4462-aac5-74dc06824d4d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.286069] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 990.286069] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cc9566-f769-4c96-45e8-c7812e40f19b" [ 990.286069] env[61972]: _type = "Task" [ 990.286069] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.293823] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cc9566-f769-4c96-45e8-c7812e40f19b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.315454] env[61972]: DEBUG oslo_vmware.api [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389560, 'name': PowerOffVM_Task, 'duration_secs': 0.620188} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.315789] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 990.316011] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.316441] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4957e27f-4447-486b-af16-018c97f2d0ac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.351982] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.352294] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.352534] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Deleting the datastore file [datastore2] f0565271-2276-4f18-813a-6f9338183480 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.352828] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d7889e8-e1dd-4352-8837-c4e4eb3dc748 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.359404] env[61972]: DEBUG oslo_vmware.api [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 990.359404] env[61972]: value = "task-1389566" [ 990.359404] env[61972]: _type = "Task" 
[ 990.359404] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.368529] env[61972]: DEBUG oslo_vmware.api [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389566, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.518126] env[61972]: DEBUG nova.compute.utils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 990.522284] env[61972]: DEBUG nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 990.522476] env[61972]: DEBUG nova.network.neutron [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 990.546762] env[61972]: DEBUG oslo_vmware.api [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389562, 'name': PowerOnVM_Task, 'duration_secs': 0.444897} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.547035] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.547243] env[61972]: DEBUG nova.compute.manager [None req-c55777be-f77b-49e8-90a4-3b5f1771ae5d tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 990.547990] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cb344a-66b6-4de6-a2b9-67c6e279d0f7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.564081] env[61972]: DEBUG nova.policy [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc3cd61498bc4f858a47a72f02466b3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3c052a272742808be2bcdc71d8f62f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 990.661160] env[61972]: DEBUG oslo_concurrency.lockutils [req-f8e5f7e0-6c92-4b06-b73b-5f81aaa70379 req-eddf2470-7121-4251-8437-0f7cd6ef450c service nova] Releasing lock "refresh_cache-ff25c137-ba78-4807-bd64-f3075e81dd5d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.692021] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041436} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.692021] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.692021] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1/5d728988-1ea5-4476-a502-ce99a53302c1.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.692021] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1/5d728988-1ea5-4476-a502-ce99a53302c1.vmdk to [datastore2] 9562558a-89ba-4169-bd0a-ad31fc0c33bc/9562558a-89ba-4169-bd0a-ad31fc0c33bc.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 990.692021] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0960cfcd-a8b2-4e5c-90eb-5a053c0286bc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.699814] env[61972]: DEBUG nova.network.neutron [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Port 78188d45-b47e-4f77-b0d9-e6fa69c90cd7 binding to destination host cpu-1 is already ACTIVE {{(pid=61972) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 990.704228] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389564, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.706555] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 990.706555] env[61972]: value = "task-1389567" [ 990.706555] env[61972]: _type = "Task" [ 990.706555] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.714173] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389567, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.804656] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52cc9566-f769-4c96-45e8-c7812e40f19b, 'name': SearchDatastore_Task, 'duration_secs': 0.023468} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.805107] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.805515] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.805854] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.807034] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.807345] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.807736] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-067c06eb-a057-44cb-ae6d-d4c02719b090 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.818924] env[61972]: DEBUG nova.network.neutron [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Successfully created port: 05a40b4d-5f28-4ab3-a94f-1c5e50f6a762 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 990.826695] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.826919] env[61972]: DEBUG 
nova.virt.vmwareapi.vmops [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.828037] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a14a73e-4498-4e84-be26-fc0cf30a7131 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.832824] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 990.832824] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52981d49-f739-8898-8ef5-eb2a2c2f12e8" [ 990.832824] env[61972]: _type = "Task" [ 990.832824] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.840380] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52981d49-f739-8898-8ef5-eb2a2c2f12e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.868522] env[61972]: DEBUG oslo_vmware.api [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.331254} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.868771] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.868954] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.869158] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.869335] env[61972]: INFO nova.compute.manager [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: f0565271-2276-4f18-813a-6f9338183480] Took 1.58 seconds to destroy the instance on the hypervisor. 
[ 990.869565] env[61972]: DEBUG oslo.service.loopingcall [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.869758] env[61972]: DEBUG nova.compute.manager [-] [instance: f0565271-2276-4f18-813a-6f9338183480] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 990.869873] env[61972]: DEBUG nova.network.neutron [-] [instance: f0565271-2276-4f18-813a-6f9338183480] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 990.884880] env[61972]: DEBUG nova.network.neutron [-] [instance: f0565271-2276-4f18-813a-6f9338183480] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.023346] env[61972]: DEBUG nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 991.026994] env[61972]: DEBUG oslo_concurrency.lockutils [None req-39a46f4e-9a4c-47cd-9f4b-e15b4a41799f tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.204711] env[61972]: DEBUG oslo_vmware.api [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389564, 'name': PowerOnVM_Task, 'duration_secs': 0.529874} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.205043] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.218564] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389567, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.307527] env[61972]: DEBUG nova.compute.manager [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 991.308516] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1b907d-f975-418c-a081-4dfc0844db8d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.344246] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52981d49-f739-8898-8ef5-eb2a2c2f12e8, 'name': SearchDatastore_Task, 'duration_secs': 0.061065} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.345107] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67b64f03-2279-475d-8d9e-57b7d262ff46 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.354027] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 991.354027] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf174f-112e-dcb1-57ad-58b79946ada0" [ 991.354027] env[61972]: _type = "Task" [ 991.354027] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.361967] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf174f-112e-dcb1-57ad-58b79946ada0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.386851] env[61972]: DEBUG nova.network.neutron [-] [instance: f0565271-2276-4f18-813a-6f9338183480] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.735900] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.736117] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.736305] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.737561] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389567, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.828948] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2cce21d1-7022-4f30-a8ac-555877b38e69 tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 32.058s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.866581] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf174f-112e-dcb1-57ad-58b79946ada0, 'name': SearchDatastore_Task, 'duration_secs': 0.046998} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.867030] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.867448] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] ff25c137-ba78-4807-bd64-f3075e81dd5d/ff25c137-ba78-4807-bd64-f3075e81dd5d.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 991.867850] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d11e7ecc-5670-4402-9227-763ac7f919c9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.876914] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 991.876914] env[61972]: value = "task-1389568" [ 991.876914] env[61972]: _type = "Task" [ 991.876914] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.885811] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.891521] env[61972]: INFO nova.compute.manager [-] [instance: f0565271-2276-4f18-813a-6f9338183480] Took 1.02 seconds to deallocate network for instance. [ 992.035707] env[61972]: DEBUG nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 992.070551] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 992.070809] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 992.071031] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.071347] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 992.071573] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.071742] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 992.072159] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 992.072220] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 992.072405] env[61972]: DEBUG nova.virt.hardware [None 
req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 992.072580] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 992.072763] env[61972]: DEBUG nova.virt.hardware [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 992.073906] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c12cde-dc50-4352-ba65-61e57d998edc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.082247] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20635dc-63a5-4d2b-8d38-95d8f3946c15 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.210856] env[61972]: DEBUG nova.compute.manager [req-ac0104f3-fdfa-4da2-b1e9-a749fca1b4e6 req-9064b153-9c35-47e7-a8ae-544f8fb7960b service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Received event network-vif-plugged-05a40b4d-5f28-4ab3-a94f-1c5e50f6a762 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 992.211270] env[61972]: DEBUG oslo_concurrency.lockutils [req-ac0104f3-fdfa-4da2-b1e9-a749fca1b4e6 req-9064b153-9c35-47e7-a8ae-544f8fb7960b service nova] Acquiring lock "91db79db-d83c-4473-87c8-9dff2f042500-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.211339] env[61972]: DEBUG oslo_concurrency.lockutils [req-ac0104f3-fdfa-4da2-b1e9-a749fca1b4e6 req-9064b153-9c35-47e7-a8ae-544f8fb7960b service nova] Lock "91db79db-d83c-4473-87c8-9dff2f042500-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.211488] env[61972]: DEBUG oslo_concurrency.lockutils [req-ac0104f3-fdfa-4da2-b1e9-a749fca1b4e6 req-9064b153-9c35-47e7-a8ae-544f8fb7960b service nova] Lock "91db79db-d83c-4473-87c8-9dff2f042500-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.213018] env[61972]: DEBUG nova.compute.manager [req-ac0104f3-fdfa-4da2-b1e9-a749fca1b4e6 req-9064b153-9c35-47e7-a8ae-544f8fb7960b service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] No waiting events found dispatching network-vif-plugged-05a40b4d-5f28-4ab3-a94f-1c5e50f6a762 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 992.213018] env[61972]: WARNING nova.compute.manager [req-ac0104f3-fdfa-4da2-b1e9-a749fca1b4e6 
req-9064b153-9c35-47e7-a8ae-544f8fb7960b service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Received unexpected event network-vif-plugged-05a40b4d-5f28-4ab3-a94f-1c5e50f6a762 for instance with vm_state building and task_state spawning. [ 992.222409] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389567, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.311370] env[61972]: DEBUG nova.network.neutron [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Successfully updated port: 05a40b4d-5f28-4ab3-a94f-1c5e50f6a762 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.387702] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.398440] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.398717] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.398940] env[61972]: DEBUG nova.objects.instance [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lazy-loading 'resources' on Instance uuid f0565271-2276-4f18-813a-6f9338183480 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.722712] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389567, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.775630] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.775857] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.776056] env[61972]: DEBUG nova.network.neutron [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.815263] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-91db79db-d83c-4473-87c8-9dff2f042500" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.815430] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-91db79db-d83c-4473-87c8-9dff2f042500" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.815514] env[61972]: DEBUG nova.network.neutron [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.889909] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389568, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.080038] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a12362-cb61-48ab-93a4-ca9c0bae9e43 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.089112] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d985c07-3a43-4b8a-b8ba-22f6b5657570 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.121742] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bca8c1-c382-4e00-8838-c5f5e4bf183f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.131386] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996af831-13dd-4443-8631-4be72154195e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.148151] env[61972]: DEBUG nova.compute.provider_tree [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.223907] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389567, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.361072] env[61972]: DEBUG nova.network.neutron [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 993.394702] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389568, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.550067] env[61972]: DEBUG nova.network.neutron [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance_info_cache with network_info: [{"id": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "address": "fa:16:3e:2d:f5:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78188d45-b4", "ovs_interfaceid": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.652209] env[61972]: DEBUG nova.scheduler.client.report [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 993.658509] env[61972]: DEBUG nova.network.neutron [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Updating instance_info_cache with network_info: [{"id": "05a40b4d-5f28-4ab3-a94f-1c5e50f6a762", "address": "fa:16:3e:45:f9:7e", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", 
"external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05a40b4d-5f", "ovs_interfaceid": "05a40b4d-5f28-4ab3-a94f-1c5e50f6a762", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.726337] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389567, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.76538} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.726658] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/5d728988-1ea5-4476-a502-ce99a53302c1/5d728988-1ea5-4476-a502-ce99a53302c1.vmdk to [datastore2] 9562558a-89ba-4169-bd0a-ad31fc0c33bc/9562558a-89ba-4169-bd0a-ad31fc0c33bc.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 993.727592] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359be29e-c400-4cc9-b642-3ca39bf3bcca {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.752064] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 9562558a-89ba-4169-bd0a-ad31fc0c33bc/9562558a-89ba-4169-bd0a-ad31fc0c33bc.vmdk or device None with type streamOptimized {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.752492] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4aa83e5-c540-4a18-9a5c-62dc2c9045d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.774914] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 993.774914] env[61972]: value = "task-1389569" [ 993.774914] env[61972]: _type = "Task" [ 993.774914] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.784598] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389569, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.889173] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389568, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.053800] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.158315] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.160741] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-91db79db-d83c-4473-87c8-9dff2f042500" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.161221] env[61972]: DEBUG nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Instance network_info: |[{"id": "05a40b4d-5f28-4ab3-a94f-1c5e50f6a762", "address": "fa:16:3e:45:f9:7e", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05a40b4d-5f", "ovs_interfaceid": "05a40b4d-5f28-4ab3-a94f-1c5e50f6a762", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 994.161506] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:f9:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e99c063c-0cb7-4db6-b077-114166cfe889', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05a40b4d-5f28-4ab3-a94f-1c5e50f6a762', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.169557] env[61972]: DEBUG oslo.service.loopingcall [None 
req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.170086] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.170356] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6b21333-9b14-4122-9007-76057f38b681 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.185770] env[61972]: INFO nova.scheduler.client.report [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Deleted allocations for instance f0565271-2276-4f18-813a-6f9338183480 [ 994.192418] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.192418] env[61972]: value = "task-1389570" [ 994.192418] env[61972]: _type = "Task" [ 994.192418] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.200435] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389570, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.239448] env[61972]: DEBUG nova.compute.manager [req-f1a54315-4aef-4c4c-bd54-6467cbebfdf0 req-2a00e601-494b-4142-9a22-c4a150041100 service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Received event network-changed-05a40b4d-5f28-4ab3-a94f-1c5e50f6a762 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 994.239684] env[61972]: DEBUG nova.compute.manager [req-f1a54315-4aef-4c4c-bd54-6467cbebfdf0 req-2a00e601-494b-4142-9a22-c4a150041100 service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Refreshing instance network info cache due to event network-changed-05a40b4d-5f28-4ab3-a94f-1c5e50f6a762. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 994.239917] env[61972]: DEBUG oslo_concurrency.lockutils [req-f1a54315-4aef-4c4c-bd54-6467cbebfdf0 req-2a00e601-494b-4142-9a22-c4a150041100 service nova] Acquiring lock "refresh_cache-91db79db-d83c-4473-87c8-9dff2f042500" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.240080] env[61972]: DEBUG oslo_concurrency.lockutils [req-f1a54315-4aef-4c4c-bd54-6467cbebfdf0 req-2a00e601-494b-4142-9a22-c4a150041100 service nova] Acquired lock "refresh_cache-91db79db-d83c-4473-87c8-9dff2f042500" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.240251] env[61972]: DEBUG nova.network.neutron [req-f1a54315-4aef-4c4c-bd54-6467cbebfdf0 req-2a00e601-494b-4142-9a22-c4a150041100 service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Refreshing network info cache for port 05a40b4d-5f28-4ab3-a94f-1c5e50f6a762 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.285634] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389569, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.388975] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389568, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.055977} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.389111] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] ff25c137-ba78-4807-bd64-f3075e81dd5d/ff25c137-ba78-4807-bd64-f3075e81dd5d.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 994.389381] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.389666] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3bf2a3d-1048-4cb8-b194-54e3b5bfbdd4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.395881] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 994.395881] env[61972]: value = "task-1389571" [ 994.395881] env[61972]: _type = "Task" [ 994.395881] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.403362] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389571, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.575072] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d6f747-227d-4779-ba8b-673c1bca032e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.595816] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f259f45d-0060-4955-adfe-d40382f079db {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.602874] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance '8745c578-de46-4ade-bf08-f0bc9bb300d8' progress to 83 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 994.693144] env[61972]: DEBUG oslo_concurrency.lockutils [None req-3aa44864-991b-41f0-9563-407d519f6543 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "f0565271-2276-4f18-813a-6f9338183480" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.008s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.703384] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389570, 'name': CreateVM_Task} progress is 25%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.786033] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389569, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.906121] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389571, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.323568} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.906438] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 994.907288] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7367a338-c16c-4f3f-b582-4832a407c0de {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.928477] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] ff25c137-ba78-4807-bd64-f3075e81dd5d/ff25c137-ba78-4807-bd64-f3075e81dd5d.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 994.931033] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74a7d21c-6590-4f0b-aadb-0aea21263d8c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.950098] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 994.950098] env[61972]: value = "task-1389572" [ 994.950098] env[61972]: _type = "Task" [ 994.950098] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.958565] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389572, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.016837] env[61972]: DEBUG nova.network.neutron [req-f1a54315-4aef-4c4c-bd54-6467cbebfdf0 req-2a00e601-494b-4142-9a22-c4a150041100 service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Updated VIF entry in instance network info cache for port 05a40b4d-5f28-4ab3-a94f-1c5e50f6a762. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.017340] env[61972]: DEBUG nova.network.neutron [req-f1a54315-4aef-4c4c-bd54-6467cbebfdf0 req-2a00e601-494b-4142-9a22-c4a150041100 service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Updating instance_info_cache with network_info: [{"id": "05a40b4d-5f28-4ab3-a94f-1c5e50f6a762", "address": "fa:16:3e:45:f9:7e", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05a40b4d-5f", "ovs_interfaceid": "05a40b4d-5f28-4ab3-a94f-1c5e50f6a762", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.110140] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 995.110522] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6073173f-8257-45bc-9cce-db2c97e852bd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.117345] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 995.117345] env[61972]: value = "task-1389573" [ 995.117345] env[61972]: _type = "Task" [ 995.117345] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.125324] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389573, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.203585] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389570, 'name': CreateVM_Task, 'duration_secs': 1.008996} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.203747] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.204433] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.204596] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.204968] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 995.205249] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd78e08a-37e2-4f9a-8008-162d2bb1a1dc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.209836] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 995.209836] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52183c9b-2368-ddbd-b76a-ea0a4ef31014" [ 995.209836] env[61972]: _type = "Task" [ 995.209836] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.217508] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52183c9b-2368-ddbd-b76a-ea0a4ef31014, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.285949] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389569, 'name': ReconfigVM_Task, 'duration_secs': 1.362305} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.286612] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 9562558a-89ba-4169-bd0a-ad31fc0c33bc/9562558a-89ba-4169-bd0a-ad31fc0c33bc.vmdk or device None with type streamOptimized {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 995.287264] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1045770-2653-4290-bca6-15a778626cfc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.292999] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 995.292999] env[61972]: value = "task-1389574" [ 995.292999] env[61972]: _type = "Task" [ 995.292999] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.302212] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389574, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.326142] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "bf32c8b2-51b4-495a-b340-5dbabdf33137" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.326521] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "bf32c8b2-51b4-495a-b340-5dbabdf33137" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.326809] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "bf32c8b2-51b4-495a-b340-5dbabdf33137-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.327077] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "bf32c8b2-51b4-495a-b340-5dbabdf33137-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.327309] 
env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "bf32c8b2-51b4-495a-b340-5dbabdf33137-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.329722] env[61972]: INFO nova.compute.manager [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Terminating instance [ 995.459467] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389572, 'name': ReconfigVM_Task, 'duration_secs': 0.451815} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.459754] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Reconfigured VM instance instance-00000061 to attach disk [datastore2] ff25c137-ba78-4807-bd64-f3075e81dd5d/ff25c137-ba78-4807-bd64-f3075e81dd5d.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 995.460397] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12c74af3-5834-42ec-b7cd-d9c60baeda96 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.466255] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 995.466255] env[61972]: value = "task-1389575" [ 995.466255] env[61972]: _type = "Task" [ 995.466255] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.474374] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389575, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.520192] env[61972]: DEBUG oslo_concurrency.lockutils [req-f1a54315-4aef-4c4c-bd54-6467cbebfdf0 req-2a00e601-494b-4142-9a22-c4a150041100 service nova] Releasing lock "refresh_cache-91db79db-d83c-4473-87c8-9dff2f042500" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.627751] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389573, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.720806] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52183c9b-2368-ddbd-b76a-ea0a4ef31014, 'name': SearchDatastore_Task, 'duration_secs': 0.008799} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.721079] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.721327] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 995.721568] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.721713] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.721891] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 995.722182] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e503687c-0d71-4dc5-a057-7282ca3d9904 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.729794] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 995.729965] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 995.730643] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4482af1a-b84a-40a1-b353-66e1ea695ea1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.735458] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 995.735458] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc86ea-8677-54e3-e6ba-21fad3579db8" [ 995.735458] env[61972]: _type = "Task" [ 995.735458] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.742442] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc86ea-8677-54e3-e6ba-21fad3579db8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.803070] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389574, 'name': Rename_Task, 'duration_secs': 0.147906} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.803413] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 995.803653] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e56eae3-7cce-46cd-8f4b-cddc6df3c4e9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.810406] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 995.810406] env[61972]: value = "task-1389576" [ 995.810406] env[61972]: _type = "Task" [ 995.810406] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.817610] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389576, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.833595] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "refresh_cache-bf32c8b2-51b4-495a-b340-5dbabdf33137" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.833595] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquired lock "refresh_cache-bf32c8b2-51b4-495a-b340-5dbabdf33137" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.833759] env[61972]: DEBUG nova.network.neutron [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.976549] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389575, 'name': Rename_Task, 'duration_secs': 0.174443} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.976830] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 995.977091] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-519bd9f8-13e0-4683-bf26-9d3c576643cd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.984193] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 995.984193] env[61972]: value = "task-1389577" [ 995.984193] env[61972]: _type = "Task" [ 995.984193] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.991839] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389577, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.127915] env[61972]: DEBUG oslo_vmware.api [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389573, 'name': PowerOnVM_Task, 'duration_secs': 0.533583} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.128221] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.128408] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-01accd5e-0fda-4815-a2eb-d5ae9a46fa64 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance '8745c578-de46-4ade-bf08-f0bc9bb300d8' progress to 100 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 996.246143] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bc86ea-8677-54e3-e6ba-21fad3579db8, 'name': SearchDatastore_Task, 'duration_secs': 0.011557} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.246877] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c01fd5c-8598-4d0e-a765-dc0820ae3db0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.251901] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 996.251901] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52be8dc6-83fe-a94d-9c56-9059d22a931b" [ 996.251901] env[61972]: _type = "Task" [ 996.251901] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.259784] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52be8dc6-83fe-a94d-9c56-9059d22a931b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.320447] env[61972]: DEBUG oslo_vmware.api [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389576, 'name': PowerOnVM_Task, 'duration_secs': 0.492556} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.320775] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.351624] env[61972]: DEBUG nova.network.neutron [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 996.397853] env[61972]: DEBUG nova.network.neutron [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.414605] env[61972]: DEBUG nova.compute.manager [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 996.415879] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1826c3aa-2387-47e2-bac3-97dee2beb008 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.494299] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389577, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.560495] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.562041] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.762564] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52be8dc6-83fe-a94d-9c56-9059d22a931b, 'name': SearchDatastore_Task, 'duration_secs': 0.027285} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.762799] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.763070] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 91db79db-d83c-4473-87c8-9dff2f042500/91db79db-d83c-4473-87c8-9dff2f042500.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 996.763356] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec23f534-e8fb-4f2f-b849-68f0f10df0be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.769810] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 996.769810] env[61972]: value = "task-1389578" [ 996.769810] env[61972]: _type = "Task" [ 996.769810] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.777536] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.900511] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Releasing lock "refresh_cache-bf32c8b2-51b4-495a-b340-5dbabdf33137" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.900961] env[61972]: DEBUG nova.compute.manager [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 996.901221] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 996.902098] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9eea7d-a8bb-4c9d-abbf-3853922e3b55 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.910021] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.910262] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b648820-2653-4bd8-af7b-e32f4dd9c78f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.916383] env[61972]: DEBUG oslo_vmware.api [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 996.916383] env[61972]: value = "task-1389579" [ 996.916383] env[61972]: _type = "Task" [ 996.916383] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.923707] env[61972]: DEBUG oslo_vmware.api [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389579, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.934146] env[61972]: DEBUG oslo_concurrency.lockutils [None req-aea76979-a288-4203-a162-c80c25bec061 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 33.176s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.995719] env[61972]: DEBUG oslo_vmware.api [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389577, 'name': PowerOnVM_Task, 'duration_secs': 0.711279} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.996195] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.996474] env[61972]: INFO nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Took 10.09 seconds to spawn the instance on the hypervisor. [ 996.996689] env[61972]: DEBUG nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 996.997566] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a992d366-ae13-46a1-9679-65237d4de9c8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.065402] env[61972]: DEBUG nova.compute.utils [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 997.282388] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389578, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.426507] env[61972]: DEBUG oslo_vmware.api [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389579, 'name': PowerOffVM_Task, 'duration_secs': 0.130596} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.426841] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 997.426947] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 997.427220] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bddd8f7-884d-4e2f-bea3-4461242dea05 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.451558] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 997.451697] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 997.451889] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Deleting the datastore file [datastore2] bf32c8b2-51b4-495a-b340-5dbabdf33137 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 997.452381] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b81bf913-87a4-4b52-835e-84f4f76ca7f0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.458587] env[61972]: DEBUG oslo_vmware.api [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for the task: (returnval){ [ 997.458587] env[61972]: value = "task-1389581" [ 997.458587] env[61972]: _type = "Task" [ 997.458587] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.466553] env[61972]: DEBUG oslo_vmware.api [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.515016] env[61972]: INFO nova.compute.manager [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Took 17.88 seconds to build instance. 
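The "Waiting for the task", "progress is N%" and "completed successfully" entries above are emitted by oslo.vmware's task-polling helpers (wait_for_task / _poll_task in oslo_vmware/api.py). The following is a minimal, illustrative sketch of driving a vCenter task through that session API, not Nova's actual call path: the endpoint and credentials are placeholders, and the instance UUID is only borrowed from the log for illustration.

    # Minimal sketch (placeholders, not Nova's code): invoke a vCenter task
    # via an oslo.vmware session and block until it finishes. This is the
    # mechanism behind the wait_for_task/_poll_task log lines above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        host='vcenter.example.test',      # placeholder vCenter endpoint
        server_username='user',           # placeholder credentials
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5)           # seconds between _poll_task polls

    # Look up the VM by instance UUID (SearchIndex.FindAllByUuid, as also
    # seen later in this log) and start a power-on task for it.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='91db79db-d83c-4473-87c8-9dff2f042500',
        vmSearch=True, instanceUuid=True)
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])

    # wait_for_task polls the task state/progress until it reaches success
    # (returning the task result) or raises if the task ends in error.
    session.wait_for_task(task)
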
[ 997.569068] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.780945] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389578, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652979} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.781279] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 91db79db-d83c-4473-87c8-9dff2f042500/91db79db-d83c-4473-87c8-9dff2f042500.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 997.781558] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 997.781832] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4353317-6e7e-4e80-ad2f-a05f5627b148 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.789168] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 997.789168] env[61972]: value = "task-1389582" [ 997.789168] env[61972]: _type = "Task" [ 997.789168] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.798440] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389582, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.968915] env[61972]: DEBUG oslo_vmware.api [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Task: {'id': task-1389581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235339} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.969227] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 997.969437] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 997.969622] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 997.969822] env[61972]: INFO nova.compute.manager [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Took 1.07 seconds to destroy the instance on the hypervisor. [ 997.970115] env[61972]: DEBUG oslo.service.loopingcall [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.970327] env[61972]: DEBUG nova.compute.manager [-] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 997.970426] env[61972]: DEBUG nova.network.neutron [-] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 997.988195] env[61972]: DEBUG nova.network.neutron [-] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.016591] env[61972]: DEBUG oslo_concurrency.lockutils [None req-855035be-6bbf-47ca-b8be-82594457e2e9 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "ff25c137-ba78-4807-bd64-f3075e81dd5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.385s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.303975] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389582, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067211} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.303975] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.305214] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a801c278-8b2e-4a6b-b50d-01ae21b182f3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.329246] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 91db79db-d83c-4473-87c8-9dff2f042500/91db79db-d83c-4473-87c8-9dff2f042500.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.329605] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67a24e25-d851-43aa-9d7a-aef847d33635 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.349226] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 998.349226] env[61972]: value = "task-1389583" [ 998.349226] env[61972]: _type = "Task" [ 998.349226] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.357693] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389583, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.490598] env[61972]: DEBUG nova.network.neutron [-] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.638340] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.638648] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.638890] env[61972]: INFO nova.compute.manager [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Attaching volume fe46ee70-0cdf-4cf9-b768-631668745fa1 to /dev/sdb [ 998.680150] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0abe157-f5a4-4b3a-b83b-2b50d8035e7b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.689485] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb235ab3-d690-4b70-8e32-7fd7e245bb81 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.705841] env[61972]: DEBUG nova.virt.block_device [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Updating existing volume attachment record: b77a1787-1c29-4d9a-acc9-f99d148d0cdb {{(pid=61972) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 998.859500] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389583, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.862194] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.862503] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.862729] env[61972]: DEBUG nova.compute.manager [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Going to confirm migration 2 {{(pid=61972) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 998.933022] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "ff25c137-ba78-4807-bd64-f3075e81dd5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.933350] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "ff25c137-ba78-4807-bd64-f3075e81dd5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.933565] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "ff25c137-ba78-4807-bd64-f3075e81dd5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.933757] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "ff25c137-ba78-4807-bd64-f3075e81dd5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.933917] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "ff25c137-ba78-4807-bd64-f3075e81dd5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.936144] env[61972]: INFO nova.compute.manager [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Terminating instance [ 998.993574] env[61972]: INFO nova.compute.manager [-] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Took 1.02 seconds to deallocate network for instance. [ 999.359971] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389583, 'name': ReconfigVM_Task, 'duration_secs': 0.588188} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.360310] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 91db79db-d83c-4473-87c8-9dff2f042500/91db79db-d83c-4473-87c8-9dff2f042500.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.360931] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8dc8a06b-0370-42ba-8503-0b7ed36ca901 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.368475] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 999.368475] env[61972]: value = "task-1389587" [ 999.368475] env[61972]: _type = "Task" [ 999.368475] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.378180] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389587, 'name': Rename_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.416026] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf68896-1727-4109-8b44-d28d0c5dda52 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.423028] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f7eedf69-cb8c-4365-8c1d-6f6e2595c822 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Suspending the VM {{(pid=61972) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 999.423321] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a29710fa-26f3-4a33-8d87-db192b2f3039 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.428168] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.428380] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.428603] env[61972]: DEBUG nova.network.neutron [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.428826] env[61972]: DEBUG nova.objects.instance [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lazy-loading 'info_cache' on Instance uuid 8745c578-de46-4ade-bf08-f0bc9bb300d8 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.432141] env[61972]: DEBUG oslo_vmware.api [None req-f7eedf69-cb8c-4365-8c1d-6f6e2595c822 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 999.432141] env[61972]: value = "task-1389588" [ 999.432141] env[61972]: _type = "Task" [ 999.432141] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.441225] env[61972]: DEBUG nova.compute.manager [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 999.441494] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.441822] env[61972]: DEBUG oslo_vmware.api [None req-f7eedf69-cb8c-4365-8c1d-6f6e2595c822 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389588, 'name': SuspendVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.442643] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0285a3-069d-41a5-8a5f-4def7ae9e03d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.449993] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.450364] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d9be769-b307-400a-bc91-436bc373ded0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.456279] env[61972]: DEBUG oslo_vmware.api [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 999.456279] env[61972]: value = "task-1389589" [ 999.456279] env[61972]: _type = "Task" [ 999.456279] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.467933] env[61972]: DEBUG oslo_vmware.api [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389589, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.500643] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.501038] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.501261] env[61972]: DEBUG nova.objects.instance [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lazy-loading 'resources' on Instance uuid bf32c8b2-51b4-495a-b340-5dbabdf33137 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.880133] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389587, 'name': Rename_Task, 'duration_secs': 0.167276} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.880468] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.880735] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52519ada-5c8b-403c-ad2c-eafba5f7c17a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.886919] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 999.886919] env[61972]: value = "task-1389590" [ 999.886919] env[61972]: _type = "Task" [ 999.886919] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.895873] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389590, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.941994] env[61972]: DEBUG oslo_vmware.api [None req-f7eedf69-cb8c-4365-8c1d-6f6e2595c822 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389588, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.966280] env[61972]: DEBUG oslo_vmware.api [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389589, 'name': PowerOffVM_Task, 'duration_secs': 0.29359} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.966535] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 999.966743] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 999.967026] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8ebe5b5-af85-4c63-b1fe-92882fadc92c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.077536] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.077776] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.077967] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleting the datastore file [datastore2] ff25c137-ba78-4807-bd64-f3075e81dd5d {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.080961] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1a1f601-305c-460e-8901-f717d254ab2d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.087955] env[61972]: DEBUG oslo_vmware.api [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1000.087955] env[61972]: value = "task-1389592" [ 1000.087955] env[61972]: _type = "Task" [ 1000.087955] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.098396] env[61972]: DEBUG oslo_vmware.api [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389592, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.168420] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21b23fd-ae7b-4cd2-9620-d411f376a7b1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.177025] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ef8758-fd42-4b17-9d70-a7a522b3e528 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.207766] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4065c8b-71ea-4f96-92f5-a035418a8317 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.216081] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25de36c6-a505-40bc-ae70-1804032fe1d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.232990] env[61972]: DEBUG nova.compute.provider_tree [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.397804] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389590, 'name': PowerOnVM_Task} progress is 79%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.445770] env[61972]: DEBUG oslo_vmware.api [None req-f7eedf69-cb8c-4365-8c1d-6f6e2595c822 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389588, 'name': SuspendVM_Task, 'duration_secs': 0.898919} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.446100] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f7eedf69-cb8c-4365-8c1d-6f6e2595c822 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Suspended the VM {{(pid=61972) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1000.446288] env[61972]: DEBUG nova.compute.manager [None req-f7eedf69-cb8c-4365-8c1d-6f6e2595c822 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1000.447121] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4661f2c9-3547-4255-bf3a-fa26aef18fcb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.599107] env[61972]: DEBUG oslo_vmware.api [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.44048} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.599392] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.599569] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1000.599752] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1000.599928] env[61972]: INFO nova.compute.manager [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1000.600214] env[61972]: DEBUG oslo.service.loopingcall [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1000.600415] env[61972]: DEBUG nova.compute.manager [-] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1000.600512] env[61972]: DEBUG nova.network.neutron [-] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1000.657500] env[61972]: DEBUG nova.network.neutron [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance_info_cache with network_info: [{"id": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "address": "fa:16:3e:2d:f5:0c", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78188d45-b4", "ovs_interfaceid": "78188d45-b47e-4f77-b0d9-e6fa69c90cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.736612] env[61972]: DEBUG nova.scheduler.client.report [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1000.859178] env[61972]: DEBUG nova.compute.manager [req-3e2bc092-8196-4054-978b-631739956586 req-ac2965e3-3084-410c-86a6-a37904b0cddb service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Received event network-vif-deleted-3cb5d7b0-0653-4fb7-9262-abe4ebb1df74 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1000.859178] env[61972]: INFO nova.compute.manager [req-3e2bc092-8196-4054-978b-631739956586 req-ac2965e3-3084-410c-86a6-a37904b0cddb service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Neutron deleted interface 3cb5d7b0-0653-4fb7-9262-abe4ebb1df74; detaching it from the instance and deleting it from the info 
cache [ 1000.859413] env[61972]: DEBUG nova.network.neutron [req-3e2bc092-8196-4054-978b-631739956586 req-ac2965e3-3084-410c-86a6-a37904b0cddb service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.896979] env[61972]: DEBUG oslo_vmware.api [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389590, 'name': PowerOnVM_Task, 'duration_secs': 0.965496} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.897278] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.897623] env[61972]: INFO nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Took 8.86 seconds to spawn the instance on the hypervisor. [ 1000.897849] env[61972]: DEBUG nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1000.898641] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d575281-64d7-48b4-bc46-9d8f33b9b1fb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.161061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-8745c578-de46-4ade-bf08-f0bc9bb300d8" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.161061] env[61972]: DEBUG nova.objects.instance [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lazy-loading 'migration_context' on Instance uuid 8745c578-de46-4ade-bf08-f0bc9bb300d8 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.241553] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.740s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.263429] env[61972]: INFO nova.scheduler.client.report [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Deleted allocations for instance bf32c8b2-51b4-495a-b340-5dbabdf33137 [ 1001.335968] env[61972]: DEBUG nova.network.neutron [-] [instance: 
ff25c137-ba78-4807-bd64-f3075e81dd5d] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.361501] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c00f4c2b-4282-45d4-87bf-8e5989cc2452 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.370921] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff00040-eeda-4e02-a97f-61e1dedde563 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.397995] env[61972]: DEBUG nova.compute.manager [req-3e2bc092-8196-4054-978b-631739956586 req-ac2965e3-3084-410c-86a6-a37904b0cddb service nova] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Detach interface failed, port_id=3cb5d7b0-0653-4fb7-9262-abe4ebb1df74, reason: Instance ff25c137-ba78-4807-bd64-f3075e81dd5d could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1001.414219] env[61972]: INFO nova.compute.manager [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Took 15.87 seconds to build instance. [ 1001.663454] env[61972]: DEBUG nova.objects.base [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Object Instance<8745c578-de46-4ade-bf08-f0bc9bb300d8> lazy-loaded attributes: info_cache,migration_context {{(pid=61972) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1001.664989] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6784f54c-cda4-4cca-b90c-7dc8836c2bcf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.683497] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68a49878-79cd-411c-9644-bc590dd18dc1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.688657] env[61972]: DEBUG oslo_vmware.api [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1001.688657] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5230314e-cc56-6819-f717-15e9d206e297" [ 1001.688657] env[61972]: _type = "Task" [ 1001.688657] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.696328] env[61972]: DEBUG oslo_vmware.api [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5230314e-cc56-6819-f717-15e9d206e297, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.775964] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce68ae28-0cba-43f0-908c-77053cbde8f0 tempest-ServerShowV247Test-1728103715 tempest-ServerShowV247Test-1728103715-project-member] Lock "bf32c8b2-51b4-495a-b340-5dbabdf33137" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.449s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.804127] env[61972]: INFO nova.compute.manager [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Resuming [ 1001.804763] env[61972]: DEBUG nova.objects.instance [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lazy-loading 'flavor' on Instance uuid 9562558a-89ba-4169-bd0a-ad31fc0c33bc {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.838613] env[61972]: INFO nova.compute.manager [-] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Took 1.24 seconds to deallocate network for instance. [ 1001.916869] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e9a96e44-81f9-431b-a216-ac6532ea148a tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "91db79db-d83c-4473-87c8-9dff2f042500" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.384s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.200593] env[61972]: DEBUG oslo_vmware.api [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5230314e-cc56-6819-f717-15e9d206e297, 'name': SearchDatastore_Task, 'duration_secs': 0.006504} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.200869] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.201108] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.345055] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.670435] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033e6ef6-36e5-4f60-8e66-fa2a594ef363 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.677200] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-64a3bb46-a80a-4eb5-801f-ea516a89105f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Suspending the VM {{(pid=61972) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1002.677470] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-bb3552dc-b8de-4787-bc9c-51ae5ed72f57 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.683506] env[61972]: DEBUG oslo_vmware.api [None req-64a3bb46-a80a-4eb5-801f-ea516a89105f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1002.683506] env[61972]: value = "task-1389594" [ 1002.683506] env[61972]: _type = "Task" [ 1002.683506] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.691566] env[61972]: DEBUG oslo_vmware.api [None req-64a3bb46-a80a-4eb5-801f-ea516a89105f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389594, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.812865] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.813216] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquired lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.813526] env[61972]: DEBUG nova.network.neutron [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.857683] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d591437f-044c-4678-830e-7c2acd114a76 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.866822] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2926f4dd-ebba-45c7-969c-aa67e14387c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.905588] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750837db-b78b-4820-9da6-26127d2a35d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.918293] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792c0641-a3e8-4043-91d7-9485f111a00d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.941690] env[61972]: DEBUG nova.compute.provider_tree [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.195822] env[61972]: DEBUG oslo_vmware.api [None req-64a3bb46-a80a-4eb5-801f-ea516a89105f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389594, 'name': SuspendVM_Task} progress is 54%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.254491] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Volume attach. 
Driver type: vmdk {{(pid=61972) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1003.254734] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294921', 'volume_id': 'fe46ee70-0cdf-4cf9-b768-631668745fa1', 'name': 'volume-fe46ee70-0cdf-4cf9-b768-631668745fa1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f71d004b-5343-4ef3-8f37-8ff544c335a2', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe46ee70-0cdf-4cf9-b768-631668745fa1', 'serial': 'fe46ee70-0cdf-4cf9-b768-631668745fa1'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1003.255683] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451d0a60-d0b0-477e-a7bc-d6806c6e24ca {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.272551] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8fc634-7686-420f-ad67-7db4ef01a0e6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.298398] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] volume-fe46ee70-0cdf-4cf9-b768-631668745fa1/volume-fe46ee70-0cdf-4cf9-b768-631668745fa1.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.298654] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d3695f9-8bf9-4f56-853e-ad284243592a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.319402] env[61972]: DEBUG oslo_vmware.api [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1003.319402] env[61972]: value = "task-1389595" [ 1003.319402] env[61972]: _type = "Task" [ 1003.319402] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.328965] env[61972]: DEBUG oslo_vmware.api [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389595, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.445912] env[61972]: DEBUG nova.scheduler.client.report [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1003.535322] env[61972]: DEBUG nova.network.neutron [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [{"id": "96b44391-970b-458b-bb63-47288e6d18a2", "address": "fa:16:3e:01:b3:84", "network": {"id": "8bbc91d8-798f-4938-9eb4-274a709d8ef5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1689289217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fd99c56733940dda5267401c71b9e5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96b44391-97", "ovs_interfaceid": "96b44391-970b-458b-bb63-47288e6d18a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.694736] env[61972]: DEBUG oslo_vmware.api [None req-64a3bb46-a80a-4eb5-801f-ea516a89105f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389594, 'name': SuspendVM_Task, 'duration_secs': 0.677559} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.695072] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-64a3bb46-a80a-4eb5-801f-ea516a89105f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Suspended the VM {{(pid=61972) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1003.695726] env[61972]: DEBUG nova.compute.manager [None req-64a3bb46-a80a-4eb5-801f-ea516a89105f tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1003.696278] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa5cf26-7ef8-4903-910e-1d00027edf6e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.832288] env[61972]: DEBUG oslo_vmware.api [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389595, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.037985] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Releasing lock "refresh_cache-9562558a-89ba-4169-bd0a-ad31fc0c33bc" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.038975] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e00497-da59-4eed-87f1-3ebd4f866266 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.046205] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Resuming the VM {{(pid=61972) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1004.046454] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62a0e4ea-e229-4233-8db6-183d90bda3ac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.052789] env[61972]: DEBUG oslo_vmware.api [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 1004.052789] env[61972]: value = "task-1389596" [ 1004.052789] env[61972]: _type = "Task" [ 1004.052789] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.060459] env[61972]: DEBUG oslo_vmware.api [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389596, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.331643] env[61972]: DEBUG oslo_vmware.api [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389595, 'name': ReconfigVM_Task, 'duration_secs': 0.943638} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.331872] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Reconfigured VM instance instance-0000005b to attach disk [datastore2] volume-fe46ee70-0cdf-4cf9-b768-631668745fa1/volume-fe46ee70-0cdf-4cf9-b768-631668745fa1.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.336460] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcb8f683-96de-40a9-9265-b23adef7b0cb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.350917] env[61972]: DEBUG oslo_vmware.api [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1004.350917] env[61972]: value = "task-1389597" [ 1004.350917] env[61972]: _type = "Task" [ 1004.350917] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.358872] env[61972]: DEBUG oslo_vmware.api [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389597, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.455582] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.254s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.458622] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.114s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.458805] env[61972]: DEBUG nova.objects.instance [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lazy-loading 'resources' on Instance uuid ff25c137-ba78-4807-bd64-f3075e81dd5d {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.563165] env[61972]: DEBUG oslo_vmware.api [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389596, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.861801] env[61972]: DEBUG oslo_vmware.api [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389597, 'name': ReconfigVM_Task, 'duration_secs': 0.205836} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.862518] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294921', 'volume_id': 'fe46ee70-0cdf-4cf9-b768-631668745fa1', 'name': 'volume-fe46ee70-0cdf-4cf9-b768-631668745fa1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f71d004b-5343-4ef3-8f37-8ff544c335a2', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe46ee70-0cdf-4cf9-b768-631668745fa1', 'serial': 'fe46ee70-0cdf-4cf9-b768-631668745fa1'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1005.018242] env[61972]: INFO nova.scheduler.client.report [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted allocation for migration 955996b3-7d42-4b16-94dc-c5aa7e5ad605 [ 1005.063089] env[61972]: DEBUG oslo_vmware.api [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389596, 'name': PowerOnVM_Task, 'duration_secs': 0.549513} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.065690] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Resumed the VM {{(pid=61972) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1005.065880] env[61972]: DEBUG nova.compute.manager [None req-e2ec515c-8046-4454-9a65-3abdeb6b8087 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1005.067528] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b894ad-1980-4067-ab89-0ab8feda8ab4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.104706] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b368e0d-9d75-4eaf-9a1c-5750b903797d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.112434] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d978e69-00e4-484e-9cfd-20cb3c00f132 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.117512] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "91db79db-d83c-4473-87c8-9dff2f042500" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.118011] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "91db79db-d83c-4473-87c8-9dff2f042500" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.118011] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "91db79db-d83c-4473-87c8-9dff2f042500-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.118246] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "91db79db-d83c-4473-87c8-9dff2f042500-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.118409] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 
tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "91db79db-d83c-4473-87c8-9dff2f042500-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.145368] env[61972]: INFO nova.compute.manager [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Terminating instance [ 1005.147235] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a678f3-0097-46f9-a520-895c03a812b5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.155979] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da201552-0ec8-479f-be7d-14a20467a0be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.169699] env[61972]: DEBUG nova.compute.provider_tree [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.524325] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2a659881-3fea-454a-a450-b042e565c806 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.662s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.652345] env[61972]: DEBUG nova.compute.manager [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1005.652715] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.653849] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141b5514-97e9-4052-a718-81434edd10ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.663659] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.663901] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-931d615a-d19e-494a-b6cc-906d15c92e96 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.672375] env[61972]: DEBUG nova.scheduler.client.report [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1005.754294] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.754550] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.754734] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleting the datastore file [datastore1] 91db79db-d83c-4473-87c8-9dff2f042500 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.755013] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05b3f759-2596-477e-a07d-550a394b1c7f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.763045] env[61972]: DEBUG oslo_vmware.api [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 
tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1005.763045] env[61972]: value = "task-1389599" [ 1005.763045] env[61972]: _type = "Task" [ 1005.763045] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.770957] env[61972]: DEBUG oslo_vmware.api [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389599, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.901335] env[61972]: DEBUG nova.objects.instance [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lazy-loading 'flavor' on Instance uuid f71d004b-5343-4ef3-8f37-8ff544c335a2 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.144624] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.144931] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.145277] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.145402] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.145538] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.148101] env[61972]: INFO nova.compute.manager [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f 
tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Terminating instance [ 1006.177373] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.718s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.196040] env[61972]: INFO nova.scheduler.client.report [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted allocations for instance ff25c137-ba78-4807-bd64-f3075e81dd5d [ 1006.274579] env[61972]: DEBUG oslo_vmware.api [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159404} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.274793] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.274978] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1006.275182] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1006.275376] env[61972]: INFO nova.compute.manager [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1006.275683] env[61972]: DEBUG oslo.service.loopingcall [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1006.275881] env[61972]: DEBUG nova.compute.manager [-] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1006.275978] env[61972]: DEBUG nova.network.neutron [-] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1006.408169] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0fd949fd-f966-49e7-a98f-e655d6413f4d tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.768s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.588016] env[61972]: DEBUG nova.compute.manager [req-da19aa5b-61c4-44d3-a92a-aa57dadb332e req-5e1a8cbb-40b6-4e45-92de-0c0017eea51d service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Received event network-vif-deleted-05a40b4d-5f28-4ab3-a94f-1c5e50f6a762 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1006.588016] env[61972]: INFO nova.compute.manager [req-da19aa5b-61c4-44d3-a92a-aa57dadb332e req-5e1a8cbb-40b6-4e45-92de-0c0017eea51d service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Neutron deleted interface 05a40b4d-5f28-4ab3-a94f-1c5e50f6a762; detaching it from the instance and deleting it from the info cache [ 1006.588016] env[61972]: DEBUG nova.network.neutron [req-da19aa5b-61c4-44d3-a92a-aa57dadb332e req-5e1a8cbb-40b6-4e45-92de-0c0017eea51d service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.653917] env[61972]: DEBUG nova.compute.manager [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1006.654154] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1006.655454] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2161b11-21d7-4043-be03-6a203becf0e2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.663580] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.664088] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a7dd0cb-bb77-43aa-9936-3116237e35f1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.672084] env[61972]: DEBUG oslo_vmware.api [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1006.672084] env[61972]: value = "task-1389600" [ 1006.672084] env[61972]: _type = "Task" [ 1006.672084] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.680280] env[61972]: DEBUG oslo_vmware.api [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389600, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.689732] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.690873] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.704842] env[61972]: DEBUG oslo_concurrency.lockutils [None req-f26d6ab3-ecdb-41ad-a1ff-231683533586 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "ff25c137-ba78-4807-bd64-f3075e81dd5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.771s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.867145] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.867448] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.867689] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.867877] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.868074] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.870184] env[61972]: INFO nova.compute.manager [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Terminating instance [ 1007.026304] env[61972]: DEBUG nova.network.neutron [-] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.091945] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79849d99-bfe4-4026-88e2-b94b5eaeb167 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.101085] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f68c71-c4c0-4560-a73e-f96e5d427541 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.129563] env[61972]: DEBUG nova.compute.manager [req-da19aa5b-61c4-44d3-a92a-aa57dadb332e req-5e1a8cbb-40b6-4e45-92de-0c0017eea51d service nova] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Detach interface failed, port_id=05a40b4d-5f28-4ab3-a94f-1c5e50f6a762, reason: Instance 91db79db-d83c-4473-87c8-9dff2f042500 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1007.182816] env[61972]: DEBUG oslo_vmware.api [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389600, 'name': PowerOffVM_Task, 'duration_secs': 0.190056} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.183192] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.183410] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.183664] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4722036-9ae4-456b-9879-16113c34287f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.192886] env[61972]: DEBUG nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Starting instance... 
{{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1007.307063] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.307447] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.316618] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1007.316965] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1007.317284] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleting the datastore file [datastore1] 8745c578-de46-4ade-bf08-f0bc9bb300d8 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.317653] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-557d3c91-a343-4266-a9fe-0dac9262c718 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.324962] env[61972]: DEBUG oslo_vmware.api [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1007.324962] env[61972]: value = "task-1389602" [ 1007.324962] env[61972]: _type = "Task" [ 1007.324962] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.333018] env[61972]: DEBUG oslo_vmware.api [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389602, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.374977] env[61972]: DEBUG nova.compute.manager [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1007.375245] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.376211] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bae4bac-84cc-43c1-b747-2356867c0bda {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.384111] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.384385] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47df6543-832b-4050-81a1-1acfaeecfa5e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.390675] env[61972]: DEBUG oslo_vmware.api [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1007.390675] env[61972]: value = "task-1389603" [ 1007.390675] env[61972]: _type = "Task" [ 1007.390675] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.398441] env[61972]: DEBUG oslo_vmware.api [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.528798] env[61972]: INFO nova.compute.manager [-] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Took 1.25 seconds to deallocate network for instance. 
[ 1007.717655] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.717936] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.719450] env[61972]: INFO nova.compute.claims [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1007.811170] env[61972]: INFO nova.compute.manager [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Detaching volume fe46ee70-0cdf-4cf9-b768-631668745fa1 [ 1007.834862] env[61972]: DEBUG oslo_vmware.api [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159918} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.835181] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1007.835444] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1007.835646] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1007.835966] env[61972]: INFO nova.compute.manager [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 1007.836069] env[61972]: DEBUG oslo.service.loopingcall [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.836589] env[61972]: DEBUG nova.compute.manager [-] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1007.836721] env[61972]: DEBUG nova.network.neutron [-] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1007.840747] env[61972]: INFO nova.virt.block_device [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Attempting to driver detach volume fe46ee70-0cdf-4cf9-b768-631668745fa1 from mountpoint /dev/sdb [ 1007.840973] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Volume detach. Driver type: vmdk {{(pid=61972) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1007.841221] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294921', 'volume_id': 'fe46ee70-0cdf-4cf9-b768-631668745fa1', 'name': 'volume-fe46ee70-0cdf-4cf9-b768-631668745fa1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f71d004b-5343-4ef3-8f37-8ff544c335a2', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe46ee70-0cdf-4cf9-b768-631668745fa1', 'serial': 'fe46ee70-0cdf-4cf9-b768-631668745fa1'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1007.842153] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad8ebac-e1be-4ab9-8a1e-df616e450365 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.870041] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568c44e4-76d5-4d3c-9812-289153411841 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.877827] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d6c34c-c623-4b71-a5a4-043cc2c7c56c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.901349] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdda3441-8c3d-4f19-a4f2-23e7b5a081af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1007.919962] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] The volume has not been displaced from its original location: [datastore2] volume-fe46ee70-0cdf-4cf9-b768-631668745fa1/volume-fe46ee70-0cdf-4cf9-b768-631668745fa1.vmdk. No consolidation needed. {{(pid=61972) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1007.925615] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Reconfiguring VM instance instance-0000005b to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1007.925615] env[61972]: DEBUG oslo_vmware.api [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389603, 'name': PowerOffVM_Task, 'duration_secs': 0.20165} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.925742] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30743caa-b60c-42cd-bb17-095775c16430 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.938213] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.938404] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.938684] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba730a97-20aa-4a2e-afc7-ae45980149f9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.945511] env[61972]: DEBUG oslo_vmware.api [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1007.945511] env[61972]: value = "task-1389605" [ 1007.945511] env[61972]: _type = "Task" [ 1007.945511] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.953289] env[61972]: DEBUG oslo_vmware.api [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389605, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.005762] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.006009] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.006208] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleting the datastore file [datastore2] e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.006481] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13ff96ce-55df-457c-b1c5-9e8fdb09504e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.012239] env[61972]: DEBUG oslo_vmware.api [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1008.012239] env[61972]: value = "task-1389606" [ 1008.012239] env[61972]: _type = "Task" [ 1008.012239] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.020625] env[61972]: DEBUG oslo_vmware.api [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389606, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.039344] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.458128] env[61972]: DEBUG oslo_vmware.api [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389605, 'name': ReconfigVM_Task, 'duration_secs': 0.236685} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.458564] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Reconfigured VM instance instance-0000005b to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1008.466482] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c5ca171-f484-4b00-ad11-ff091ac2837f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.484177] env[61972]: DEBUG oslo_vmware.api [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1008.484177] env[61972]: value = "task-1389607" [ 1008.484177] env[61972]: _type = "Task" [ 1008.484177] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.495867] env[61972]: DEBUG oslo_vmware.api [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389607, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.521165] env[61972]: DEBUG oslo_vmware.api [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.408044} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.521459] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.521662] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.521846] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.522043] env[61972]: INFO nova.compute.manager [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1008.522291] env[61972]: DEBUG oslo.service.loopingcall [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.522485] env[61972]: DEBUG nova.compute.manager [-] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1008.522583] env[61972]: DEBUG nova.network.neutron [-] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1008.610859] env[61972]: DEBUG nova.network.neutron [-] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.621282] env[61972]: DEBUG nova.compute.manager [req-2330db36-65ac-4d50-93bc-55afdc8020b9 req-080ec924-8c87-409f-ae78-96d8f250b10f service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Received event network-vif-deleted-78188d45-b47e-4f77-b0d9-e6fa69c90cd7 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1008.621510] env[61972]: INFO nova.compute.manager [req-2330db36-65ac-4d50-93bc-55afdc8020b9 req-080ec924-8c87-409f-ae78-96d8f250b10f service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Neutron deleted interface 78188d45-b47e-4f77-b0d9-e6fa69c90cd7; detaching it from the instance and deleting it from the info cache [ 1008.621686] env[61972]: DEBUG nova.network.neutron [req-2330db36-65ac-4d50-93bc-55afdc8020b9 req-080ec924-8c87-409f-ae78-96d8f250b10f service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.574387] env[61972]: INFO nova.compute.manager [-] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Took 1.74 seconds to deallocate network for instance. [ 1009.579717] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b75e7b18-0e1f-4e48-a7b1-be699bc68427 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.588927] env[61972]: DEBUG oslo_vmware.api [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389607, 'name': ReconfigVM_Task, 'duration_secs': 0.243747} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.590487] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294921', 'volume_id': 'fe46ee70-0cdf-4cf9-b768-631668745fa1', 'name': 'volume-fe46ee70-0cdf-4cf9-b768-631668745fa1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f71d004b-5343-4ef3-8f37-8ff544c335a2', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe46ee70-0cdf-4cf9-b768-631668745fa1', 'serial': 'fe46ee70-0cdf-4cf9-b768-631668745fa1'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1009.596142] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7157019b-b711-4ed0-9b68-36a4a343290d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.607204] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d44f2a-9532-43b2-95cf-74f959a4aad9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.615725] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478729a2-0947-4eea-9202-acc8ce026b86 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.625834] env[61972]: DEBUG nova.compute.manager [req-2330db36-65ac-4d50-93bc-55afdc8020b9 req-080ec924-8c87-409f-ae78-96d8f250b10f service nova] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Detach interface failed, port_id=78188d45-b47e-4f77-b0d9-e6fa69c90cd7, reason: Instance 8745c578-de46-4ade-bf08-f0bc9bb300d8 could not be found. 
{{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1009.652155] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde41663-c221-4de6-8d56-f112d497947b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.659525] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62db439f-91fd-4c9a-b652-acc381d0cb0d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.673840] env[61972]: DEBUG nova.compute.provider_tree [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.076431] env[61972]: DEBUG nova.network.neutron [-] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.084998] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.140222] env[61972]: DEBUG nova.objects.instance [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lazy-loading 'flavor' on Instance uuid f71d004b-5343-4ef3-8f37-8ff544c335a2 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.176921] env[61972]: DEBUG nova.scheduler.client.report [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1010.495160] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.495453] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.495703] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.495895] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.496080] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.498325] env[61972]: INFO nova.compute.manager [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Terminating instance [ 1010.579099] env[61972]: INFO nova.compute.manager [-] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Took 2.06 seconds to deallocate network for instance. [ 1010.648702] env[61972]: DEBUG nova.compute.manager [req-56685722-8e28-4d46-b667-2d280898b475 req-ef9e2ac3-32a6-49dd-af77-a13df8e61de4 service nova] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Received event network-vif-deleted-582a2b0a-d087-426c-b734-d6fc7ececb7d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1010.682298] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.964s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.682833] env[61972]: DEBUG nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Start building networks asynchronously for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1010.685455] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.646s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.685718] env[61972]: DEBUG nova.objects.instance [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'resources' on Instance uuid 91db79db-d83c-4473-87c8-9dff2f042500 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.002060] env[61972]: DEBUG nova.compute.manager [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1011.002315] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1011.003291] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d236cb-ca2e-48ad-aefd-87beefef5ab2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.011084] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.011347] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3daecd5-065b-4050-9e0d-26077be1d3a9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.017441] env[61972]: DEBUG oslo_vmware.api [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 1011.017441] env[61972]: value = "task-1389608" [ 1011.017441] env[61972]: _type = "Task" [ 1011.017441] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.024387] env[61972]: DEBUG oslo_vmware.api [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389608, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.085013] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.147801] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0588eeb0-404d-469d-a90f-d419995ff3f3 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.840s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.189279] env[61972]: DEBUG nova.compute.utils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1011.193608] env[61972]: DEBUG nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1011.193832] env[61972]: DEBUG nova.network.neutron [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1011.235056] env[61972]: DEBUG nova.policy [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cefef67f4ae0451aaa108df20aa7a3db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a685a448ff041db8bc49b4429688e34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 1011.319789] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701ccb2e-f874-47a4-9374-1c4e1a2ba9bf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.327711] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca36c687-e715-401a-bc15-fe7a03d6999b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.357495] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4803580f-4e50-4f1f-985e-cea1881a7188 {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.364521] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a96c494-5f40-41c3-b2b6-a97ef889b065 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.378461] env[61972]: DEBUG nova.compute.provider_tree [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.481896] env[61972]: DEBUG nova.network.neutron [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Successfully created port: 1c8136d3-adb9-485c-b04a-26fb85df1f6d {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1011.526205] env[61972]: DEBUG oslo_vmware.api [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389608, 'name': PowerOffVM_Task, 'duration_secs': 0.205581} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.526554] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1011.526803] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1011.526977] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f2547db-6966-4452-9a24-0104da4b1b97 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.579144] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.579465] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.579751] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 
tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "f71d004b-5343-4ef3-8f37-8ff544c335a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.579975] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.580170] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.582490] env[61972]: INFO nova.compute.manager [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Terminating instance [ 1011.586153] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1011.586368] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1011.586553] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleting the datastore file [datastore2] 9562558a-89ba-4169-bd0a-ad31fc0c33bc {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1011.586956] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-489a62b6-2d19-4705-a684-6fcb80e78410 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.592931] env[61972]: DEBUG oslo_vmware.api [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for the task: (returnval){ [ 1011.592931] env[61972]: value = "task-1389610" [ 1011.592931] env[61972]: _type = "Task" [ 1011.592931] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.600923] env[61972]: DEBUG oslo_vmware.api [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389610, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.694596] env[61972]: DEBUG nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1011.881581] env[61972]: DEBUG nova.scheduler.client.report [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1012.090031] env[61972]: DEBUG nova.compute.manager [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1012.090031] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.090031] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f949f368-c504-45b4-a05b-4d1238e3399b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.099421] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.100060] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-647a02d6-4e56-4524-8d3a-a8f337320933 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.104542] env[61972]: DEBUG oslo_vmware.api [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Task: {'id': task-1389610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145162} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.105095] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.105292] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.105470] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.105839] env[61972]: INFO nova.compute.manager [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1012.106118] env[61972]: DEBUG oslo.service.loopingcall [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.106322] env[61972]: DEBUG nova.compute.manager [-] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1012.106419] env[61972]: DEBUG nova.network.neutron [-] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1012.109513] env[61972]: DEBUG oslo_vmware.api [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1012.109513] env[61972]: value = "task-1389611" [ 1012.109513] env[61972]: _type = "Task" [ 1012.109513] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.117765] env[61972]: DEBUG oslo_vmware.api [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.386779] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.389379] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.304s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.389702] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.391254] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.306s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.391498] env[61972]: DEBUG nova.objects.instance [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lazy-loading 'resources' on Instance uuid e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.442722] env[61972]: INFO nova.scheduler.client.report [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f 
tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted allocations for instance 8745c578-de46-4ade-bf08-f0bc9bb300d8 [ 1012.444680] env[61972]: INFO nova.scheduler.client.report [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted allocations for instance 91db79db-d83c-4473-87c8-9dff2f042500 [ 1012.510682] env[61972]: DEBUG nova.compute.manager [req-15ff6fbb-4f18-45db-84ef-855b07cad54c req-147b3cf9-3073-41f7-b238-11072dd018c1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Received event network-vif-deleted-96b44391-970b-458b-bb63-47288e6d18a2 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1012.510898] env[61972]: INFO nova.compute.manager [req-15ff6fbb-4f18-45db-84ef-855b07cad54c req-147b3cf9-3073-41f7-b238-11072dd018c1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Neutron deleted interface 96b44391-970b-458b-bb63-47288e6d18a2; detaching it from the instance and deleting it from the info cache [ 1012.511104] env[61972]: DEBUG nova.network.neutron [req-15ff6fbb-4f18-45db-84ef-855b07cad54c req-147b3cf9-3073-41f7-b238-11072dd018c1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.619191] env[61972]: DEBUG oslo_vmware.api [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389611, 'name': PowerOffVM_Task, 'duration_secs': 0.193786} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.619530] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.619640] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.619885] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a99f9191-686f-4e7e-a943-176b99891a91 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.681154] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.681346] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.681534] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Deleting the datastore file [datastore2] f71d004b-5343-4ef3-8f37-8ff544c335a2 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.681795] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0d03f17-8d77-4780-a902-016ec352bd7a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.687490] env[61972]: DEBUG oslo_vmware.api [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1012.687490] env[61972]: value = "task-1389613" [ 1012.687490] env[61972]: _type = "Task" [ 1012.687490] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.695378] env[61972]: DEBUG oslo_vmware.api [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389613, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.704546] env[61972]: DEBUG nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1012.730961] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1012.731242] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1012.731406] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.731590] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1012.731738] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.731886] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1012.732111] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1012.732277] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1012.732446] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1012.732612] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1012.732785] env[61972]: DEBUG nova.virt.hardware [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1012.733714] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04e9011-2244-4d30-8dbd-a5ad58d52196 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.741840] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacc713f-af42-4556-aac3-82b3d0d3ca65 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.863120] env[61972]: DEBUG nova.compute.manager [req-e518e5ef-a3a0-48a2-93de-35405f0238b6 req-9c276027-36c9-4aec-b040-7340ff1d42f7 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Received event network-vif-plugged-1c8136d3-adb9-485c-b04a-26fb85df1f6d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1012.863120] env[61972]: DEBUG oslo_concurrency.lockutils [req-e518e5ef-a3a0-48a2-93de-35405f0238b6 req-9c276027-36c9-4aec-b040-7340ff1d42f7 service nova] Acquiring lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.863120] env[61972]: DEBUG oslo_concurrency.lockutils [req-e518e5ef-a3a0-48a2-93de-35405f0238b6 req-9c276027-36c9-4aec-b040-7340ff1d42f7 service nova] Lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.863120] env[61972]: DEBUG oslo_concurrency.lockutils [req-e518e5ef-a3a0-48a2-93de-35405f0238b6 req-9c276027-36c9-4aec-b040-7340ff1d42f7 service nova] Lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.863802] env[61972]: 
DEBUG nova.compute.manager [req-e518e5ef-a3a0-48a2-93de-35405f0238b6 req-9c276027-36c9-4aec-b040-7340ff1d42f7 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] No waiting events found dispatching network-vif-plugged-1c8136d3-adb9-485c-b04a-26fb85df1f6d {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1012.864133] env[61972]: WARNING nova.compute.manager [req-e518e5ef-a3a0-48a2-93de-35405f0238b6 req-9c276027-36c9-4aec-b040-7340ff1d42f7 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Received unexpected event network-vif-plugged-1c8136d3-adb9-485c-b04a-26fb85df1f6d for instance with vm_state building and task_state spawning. [ 1012.910024] env[61972]: DEBUG nova.network.neutron [-] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.948309] env[61972]: DEBUG nova.network.neutron [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Successfully updated port: 1c8136d3-adb9-485c-b04a-26fb85df1f6d {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1012.957579] env[61972]: DEBUG oslo_concurrency.lockutils [None req-65ed6ab1-57ed-4889-9b8c-3e826597c04f tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "8745c578-de46-4ade-bf08-f0bc9bb300d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.813s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.959352] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c181b4a0-20b7-40c1-ae6e-5ee3afde97c3 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "91db79db-d83c-4473-87c8-9dff2f042500" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.841s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.012583] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242b986e-770a-465d-857d-ac3c190479d1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.015333] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff2f9337-dbf8-49fa-b734-761c896b5e9b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.022134] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8143dbb9-e462-4a28-ab30-0b56fb8988cf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.027226] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecce18f0-0481-4910-986f-fcc47fcf5a6a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.068662] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b74447-c313-49d6-ab13-93dd312763e5 {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.071280] env[61972]: DEBUG nova.compute.manager [req-15ff6fbb-4f18-45db-84ef-855b07cad54c req-147b3cf9-3073-41f7-b238-11072dd018c1 service nova] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Detach interface failed, port_id=96b44391-970b-458b-bb63-47288e6d18a2, reason: Instance 9562558a-89ba-4169-bd0a-ad31fc0c33bc could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1013.075997] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5631ee52-31b3-458f-a85a-6e34344aac8d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.089506] env[61972]: DEBUG nova.compute.provider_tree [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.197382] env[61972]: DEBUG oslo_vmware.api [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389613, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150107} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.197857] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1013.198060] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1013.198243] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.198419] env[61972]: INFO nova.compute.manager [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1013.198655] env[61972]: DEBUG oslo.service.loopingcall [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1013.198843] env[61972]: DEBUG nova.compute.manager [-] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1013.198938] env[61972]: DEBUG nova.network.neutron [-] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.411138] env[61972]: INFO nova.compute.manager [-] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Took 1.30 seconds to deallocate network for instance. [ 1013.455022] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "refresh_cache-86d022e2-bd02-45cd-a9dd-362e912dd8e1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.455022] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "refresh_cache-86d022e2-bd02-45cd-a9dd-362e912dd8e1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.455022] env[61972]: DEBUG nova.network.neutron [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.592925] env[61972]: DEBUG nova.scheduler.client.report [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1013.919039] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.984935] env[61972]: DEBUG nova.network.neutron [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1014.099531] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.708s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.102374] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.184s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.102374] env[61972]: DEBUG nova.objects.instance [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lazy-loading 'resources' on Instance uuid 9562558a-89ba-4169-bd0a-ad31fc0c33bc {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.118329] env[61972]: INFO nova.scheduler.client.report [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted allocations for instance e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d [ 1014.127771] env[61972]: DEBUG nova.network.neutron [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Updating instance_info_cache with network_info: [{"id": "1c8136d3-adb9-485c-b04a-26fb85df1f6d", "address": "fa:16:3e:02:df:cb", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c8136d3-ad", "ovs_interfaceid": "1c8136d3-adb9-485c-b04a-26fb85df1f6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.167731] env[61972]: DEBUG nova.network.neutron [-] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.533058] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 
tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.533058] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.628304] env[61972]: DEBUG oslo_concurrency.lockutils [None req-43dd83fe-b29d-49b6-8834-e437b16eadbb tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.761s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.633899] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "refresh_cache-86d022e2-bd02-45cd-a9dd-362e912dd8e1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.634218] env[61972]: DEBUG nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Instance network_info: |[{"id": "1c8136d3-adb9-485c-b04a-26fb85df1f6d", "address": "fa:16:3e:02:df:cb", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c8136d3-ad", "ovs_interfaceid": "1c8136d3-adb9-485c-b04a-26fb85df1f6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1014.634650] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:df:cb', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c8136d3-adb9-485c-b04a-26fb85df1f6d', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.642415] env[61972]: DEBUG oslo.service.loopingcall [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.645735] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.647023] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62f9b615-00f4-4ed6-9b58-ada85dafc9ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.669906] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.669906] env[61972]: value = "task-1389614" [ 1014.669906] env[61972]: _type = "Task" [ 1014.669906] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.674579] env[61972]: INFO nova.compute.manager [-] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Took 1.48 seconds to deallocate network for instance. [ 1014.683215] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389614, 'name': CreateVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.717023] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9f96e9-8e78-4448-b56c-0c64faadf14f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.724446] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e8c184-377e-421b-a848-6ace16a5bac6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.755173] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4c9f33-ac38-4d81-b670-af88fe5eb3da {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.762201] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c1de76-f1ce-42b7-aa5d-36c92f0fd5ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.775104] env[61972]: DEBUG nova.compute.provider_tree [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.890392] env[61972]: DEBUG nova.compute.manager [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Received event 
network-changed-1c8136d3-adb9-485c-b04a-26fb85df1f6d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1014.890609] env[61972]: DEBUG nova.compute.manager [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Refreshing instance network info cache due to event network-changed-1c8136d3-adb9-485c-b04a-26fb85df1f6d. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1014.890826] env[61972]: DEBUG oslo_concurrency.lockutils [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] Acquiring lock "refresh_cache-86d022e2-bd02-45cd-a9dd-362e912dd8e1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.890970] env[61972]: DEBUG oslo_concurrency.lockutils [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] Acquired lock "refresh_cache-86d022e2-bd02-45cd-a9dd-362e912dd8e1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.891208] env[61972]: DEBUG nova.network.neutron [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Refreshing network info cache for port 1c8136d3-adb9-485c-b04a-26fb85df1f6d {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1015.035277] env[61972]: DEBUG nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1015.179607] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389614, 'name': CreateVM_Task, 'duration_secs': 0.306899} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.180551] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.180747] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1015.181423] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.181584] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.181894] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1015.182160] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcabad1c-0fea-42f3-8055-9ba1982a5f03 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.186415] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1015.186415] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7da9a-2a34-67a5-68ac-860bedfbe520" [ 1015.186415] env[61972]: _type = "Task" [ 1015.186415] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.193854] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7da9a-2a34-67a5-68ac-860bedfbe520, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.278507] env[61972]: DEBUG nova.scheduler.client.report [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1015.555493] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.595194] env[61972]: DEBUG nova.network.neutron [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Updated VIF entry in instance network info cache for port 1c8136d3-adb9-485c-b04a-26fb85df1f6d. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1015.595602] env[61972]: DEBUG nova.network.neutron [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Updating instance_info_cache with network_info: [{"id": "1c8136d3-adb9-485c-b04a-26fb85df1f6d", "address": "fa:16:3e:02:df:cb", "network": {"id": "767c99dd-b0a1-4b40-91b5-a0241463e3d0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-456613371-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a685a448ff041db8bc49b4429688e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c8136d3-ad", "ovs_interfaceid": "1c8136d3-adb9-485c-b04a-26fb85df1f6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.696635] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b7da9a-2a34-67a5-68ac-860bedfbe520, 'name': SearchDatastore_Task, 'duration_secs': 0.009698} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.696956] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.697217] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.697451] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.697602] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.697780] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.698048] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0681c60a-df15-4c7d-843c-cd6547777c46 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.706033] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.706178] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.706841] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f9d7eec-85ae-45ea-a23f-f35d2cd744fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.713191] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1015.713191] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e9265d-7283-6c50-319f-103c2212d979" [ 1015.713191] env[61972]: _type = "Task" [ 1015.713191] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.718874] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e9265d-7283-6c50-319f-103c2212d979, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.783283] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.786190] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.605s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.786190] env[61972]: DEBUG nova.objects.instance [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lazy-loading 'resources' on Instance uuid f71d004b-5343-4ef3-8f37-8ff544c335a2 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.799956] env[61972]: INFO nova.scheduler.client.report [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Deleted allocations for instance 9562558a-89ba-4169-bd0a-ad31fc0c33bc [ 1016.070036] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "fe623f2c-1fd9-43f0-be96-29bb252e0171" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.070316] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "fe623f2c-1fd9-43f0-be96-29bb252e0171" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.097648] env[61972]: DEBUG oslo_concurrency.lockutils [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] Releasing lock "refresh_cache-86d022e2-bd02-45cd-a9dd-362e912dd8e1" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.097970] env[61972]: DEBUG nova.compute.manager [req-f110414e-71ae-414e-b025-4bbf23ce624f req-383dbf90-a6cb-431c-a8ba-f353d4e85977 service nova] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Received event network-vif-deleted-fdefc4b7-2c39-496c-9909-b5e05cbdc1da {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1016.221846] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52e9265d-7283-6c50-319f-103c2212d979, 'name': SearchDatastore_Task, 'duration_secs': 0.010096} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.222649] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b2be9f5-c5e3-4231-be74-e0f2d128e13f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.227300] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1016.227300] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]525973f5-2e3d-d607-af70-595275e960d5" [ 1016.227300] env[61972]: _type = "Task" [ 1016.227300] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.234546] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525973f5-2e3d-d607-af70-595275e960d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.307051] env[61972]: DEBUG oslo_concurrency.lockutils [None req-a67b260b-66c2-4cdc-a728-1db61f70fc27 tempest-ServersNegativeTestJSON-1832544787 tempest-ServersNegativeTestJSON-1832544787-project-member] Lock "9562558a-89ba-4169-bd0a-ad31fc0c33bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.812s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.374205] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c454ea3-1d7c-43b9-8439-d99c2cb42e28 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.381951] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95be281a-19df-4f13-92df-ca2e523c2aa9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.411941] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45dae4fb-bc95-41af-8d0d-54a4e332d8ff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.419204] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddf1be4-f3d7-454d-9f82-a62c2f5c59d1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.433096] env[61972]: DEBUG nova.compute.provider_tree [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.572148] env[61972]: DEBUG nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1016.738177] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525973f5-2e3d-d607-af70-595275e960d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009302} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.738467] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.738732] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 86d022e2-bd02-45cd-a9dd-362e912dd8e1/86d022e2-bd02-45cd-a9dd-362e912dd8e1.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1016.738978] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d6706d0-14a1-4313-a0c3-36a4812d2dc3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.745575] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1016.745575] env[61972]: value = "task-1389615" [ 1016.745575] env[61972]: _type = "Task" [ 1016.745575] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.752693] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389615, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.936477] env[61972]: DEBUG nova.scheduler.client.report [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1017.099134] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.255198] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483238} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.255479] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 86d022e2-bd02-45cd-a9dd-362e912dd8e1/86d022e2-bd02-45cd-a9dd-362e912dd8e1.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1017.255716] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.255966] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e23483d-87cc-41de-9c6f-c05f95922adf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.261966] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1017.261966] env[61972]: value = "task-1389616" [ 1017.261966] env[61972]: _type = "Task" [ 1017.261966] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.269088] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389616, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.442986] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.657s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.445617] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.890s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.447794] env[61972]: INFO nova.compute.claims [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.464899] env[61972]: INFO nova.scheduler.client.report [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Deleted allocations for instance f71d004b-5343-4ef3-8f37-8ff544c335a2 [ 1017.771698] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389616, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062965} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.771966] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1017.772788] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880ca86e-9f97-4fc1-9109-8067e3b803ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.795764] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 86d022e2-bd02-45cd-a9dd-362e912dd8e1/86d022e2-bd02-45cd-a9dd-362e912dd8e1.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.796420] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-824251b9-ffa0-4b88-ae80-1c56b6631665 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.815618] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1017.815618] env[61972]: value = "task-1389617" [ 1017.815618] env[61972]: _type = "Task" [ 1017.815618] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.824675] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389617, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.973222] env[61972]: DEBUG oslo_concurrency.lockutils [None req-54dd6781-d07e-4596-8717-88eced1d55d4 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "f71d004b-5343-4ef3-8f37-8ff544c335a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.394s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.325668] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389617, 'name': ReconfigVM_Task, 'duration_secs': 0.28767} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.326448] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 86d022e2-bd02-45cd-a9dd-362e912dd8e1/86d022e2-bd02-45cd-a9dd-362e912dd8e1.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.326600] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d83b915-e6d5-41f7-8438-72c20e84b171 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.332431] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1018.332431] env[61972]: value = "task-1389618" [ 1018.332431] env[61972]: _type = "Task" [ 1018.332431] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.339807] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389618, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.566893] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770d5102-3489-41b9-8f8a-ed0629649a67 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.573593] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafed40c-aef7-4fa6-9170-e24f9d7119b9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.603785] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9466e2ea-9014-4ea4-a365-841d28384c16 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.610670] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb7116c-dd68-407a-be00-ea7ed528677a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.624711] env[61972]: DEBUG nova.compute.provider_tree [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.841544] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389618, 'name': Rename_Task, 'duration_secs': 0.13575} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.841817] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.842160] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72d39792-62b3-4eaf-9a49-ef3d699c00c4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.847693] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1018.847693] env[61972]: value = "task-1389619" [ 1018.847693] env[61972]: _type = "Task" [ 1018.847693] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.854979] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389619, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.128329] env[61972]: DEBUG nova.scheduler.client.report [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1019.357734] env[61972]: DEBUG oslo_vmware.api [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389619, 'name': PowerOnVM_Task, 'duration_secs': 0.480766} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.358026] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1019.358263] env[61972]: INFO nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Took 6.65 seconds to spawn the instance on the hypervisor. 
[ 1019.358513] env[61972]: DEBUG nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1019.359378] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845e909b-0ca8-44ca-ad8c-71eddcb0fa69 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.633288] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.633862] env[61972]: DEBUG nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1019.636907] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.539s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.638539] env[61972]: INFO nova.compute.claims [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.844658] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.844905] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.874236] env[61972]: INFO nova.compute.manager [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Took 12.18 seconds to build instance. 
[ 1020.142914] env[61972]: DEBUG nova.compute.utils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1020.146340] env[61972]: DEBUG nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Allocating IP information in the background. {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1020.146442] env[61972]: DEBUG nova.network.neutron [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1020.191169] env[61972]: DEBUG nova.policy [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc3cd61498bc4f858a47a72f02466b3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3c052a272742808be2bcdc71d8f62f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 1020.347804] env[61972]: DEBUG nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1020.375972] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d489ee92-3736-4448-8d6b-8c598a6fbd86 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.686s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.436107] env[61972]: DEBUG nova.network.neutron [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Successfully created port: 2cc52359-688b-48a0-8436-a1d5cfd37738 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1020.647436] env[61972]: DEBUG nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1020.744628] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286277e7-dd7e-4549-a37e-18bd7e706a3e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.753792] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78bc114-4f36-42af-b982-c8c158c85024 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.782494] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61684194-1265-448a-9ab8-78b5d391456d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.790080] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a02b54c-a21b-411b-8aa8-51f58670e4b5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.802874] env[61972]: DEBUG nova.compute.provider_tree [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.866148] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.306420] env[61972]: DEBUG nova.scheduler.client.report [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1021.618056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.618056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.618466] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.618466] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.618546] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.620596] env[61972]: INFO nova.compute.manager [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Terminating instance [ 1021.657889] env[61972]: DEBUG nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1021.692619] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1021.692862] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1021.693068] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.693396] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1021.693662] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.693930] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1021.694221] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1021.694411] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1021.694599] env[61972]: DEBUG nova.virt.hardware [None 
req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1021.694780] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1021.694958] env[61972]: DEBUG nova.virt.hardware [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1021.696078] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050e481f-c3e3-4f9a-a399-53e327ba1c79 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.703928] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce09aa9-f7cb-4b2b-a14a-078f8506a4d4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.807710] env[61972]: DEBUG nova.compute.manager [req-2f6f6f49-64e2-4b13-901d-05962208667f req-e8c57916-b299-4f6e-bf4e-263490d0208a service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Received event network-vif-plugged-2cc52359-688b-48a0-8436-a1d5cfd37738 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1021.807945] env[61972]: DEBUG oslo_concurrency.lockutils [req-2f6f6f49-64e2-4b13-901d-05962208667f req-e8c57916-b299-4f6e-bf4e-263490d0208a service nova] Acquiring lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.808274] env[61972]: DEBUG oslo_concurrency.lockutils [req-2f6f6f49-64e2-4b13-901d-05962208667f req-e8c57916-b299-4f6e-bf4e-263490d0208a service nova] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.808369] env[61972]: DEBUG oslo_concurrency.lockutils [req-2f6f6f49-64e2-4b13-901d-05962208667f req-e8c57916-b299-4f6e-bf4e-263490d0208a service nova] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.808518] env[61972]: DEBUG nova.compute.manager [req-2f6f6f49-64e2-4b13-901d-05962208667f req-e8c57916-b299-4f6e-bf4e-263490d0208a service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] No waiting events found dispatching network-vif-plugged-2cc52359-688b-48a0-8436-a1d5cfd37738 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1021.808718] env[61972]: WARNING nova.compute.manager 
[req-2f6f6f49-64e2-4b13-901d-05962208667f req-e8c57916-b299-4f6e-bf4e-263490d0208a service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Received unexpected event network-vif-plugged-2cc52359-688b-48a0-8436-a1d5cfd37738 for instance with vm_state building and task_state spawning. [ 1021.810835] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.811345] env[61972]: DEBUG nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1021.813854] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.948s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.815458] env[61972]: INFO nova.compute.claims [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.125156] env[61972]: DEBUG nova.compute.manager [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1022.125309] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1022.126140] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f720421b-630a-4e05-b81e-841054fc800d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.133965] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.134207] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fdf67f56-0907-4bb6-b2c5-7cf13b1caa42 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.140454] env[61972]: DEBUG oslo_vmware.api [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1022.140454] env[61972]: value = "task-1389620" [ 1022.140454] env[61972]: _type = "Task" [ 1022.140454] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.147386] env[61972]: DEBUG oslo_vmware.api [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.322420] env[61972]: DEBUG nova.compute.utils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1022.325980] env[61972]: DEBUG nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1022.325980] env[61972]: DEBUG nova.network.neutron [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1022.376099] env[61972]: DEBUG nova.network.neutron [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Successfully updated port: 2cc52359-688b-48a0-8436-a1d5cfd37738 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1022.384933] env[61972]: DEBUG nova.policy [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa1cef9829b45f4bbe90e9882b8f8c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57829399c5741c08c30bb60163148b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 1022.403626] env[61972]: DEBUG nova.compute.manager [req-7e7a334f-83e3-4157-9238-b668846b0dd7 req-aa90cbc1-a394-4a40-85ab-8cf55dbd71c3 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Received event network-changed-2cc52359-688b-48a0-8436-a1d5cfd37738 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1022.403999] env[61972]: DEBUG nova.compute.manager [req-7e7a334f-83e3-4157-9238-b668846b0dd7 req-aa90cbc1-a394-4a40-85ab-8cf55dbd71c3 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Refreshing instance network info cache due to event network-changed-2cc52359-688b-48a0-8436-a1d5cfd37738. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1022.404048] env[61972]: DEBUG oslo_concurrency.lockutils [req-7e7a334f-83e3-4157-9238-b668846b0dd7 req-aa90cbc1-a394-4a40-85ab-8cf55dbd71c3 service nova] Acquiring lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.404199] env[61972]: DEBUG oslo_concurrency.lockutils [req-7e7a334f-83e3-4157-9238-b668846b0dd7 req-aa90cbc1-a394-4a40-85ab-8cf55dbd71c3 service nova] Acquired lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.404372] env[61972]: DEBUG nova.network.neutron [req-7e7a334f-83e3-4157-9238-b668846b0dd7 req-aa90cbc1-a394-4a40-85ab-8cf55dbd71c3 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Refreshing network info cache for port 2cc52359-688b-48a0-8436-a1d5cfd37738 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.650994] env[61972]: DEBUG oslo_vmware.api [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389620, 'name': PowerOffVM_Task, 'duration_secs': 0.210866} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.651316] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1022.651474] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1022.651731] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b59297f7-ecac-4a56-a0ab-59ddbdf9a5cc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.719639] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1022.719873] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1022.720069] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleting the datastore file [datastore2] 86d022e2-bd02-45cd-a9dd-362e912dd8e1 {{(pid=61972) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.720342] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94e1fc1c-c376-4ccd-8c92-f9a5a3b5f87f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.726031] env[61972]: DEBUG oslo_vmware.api [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for the task: (returnval){ [ 1022.726031] env[61972]: value = "task-1389622" [ 1022.726031] env[61972]: _type = "Task" [ 1022.726031] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.733549] env[61972]: DEBUG oslo_vmware.api [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.827850] env[61972]: DEBUG nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1022.874477] env[61972]: DEBUG nova.network.neutron [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Successfully created port: bbcf3c01-de4f-46b2-af22-eb28c8a3bcde {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.879424] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.936082] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6cef32-25c1-4020-9244-20829889d7ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.939975] env[61972]: DEBUG nova.network.neutron [req-7e7a334f-83e3-4157-9238-b668846b0dd7 req-aa90cbc1-a394-4a40-85ab-8cf55dbd71c3 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1022.946638] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db8a489-eea3-4090-ae49-66a86437182b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.977761] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa377266-c352-410e-b2e6-a3b47b92cbc1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.985696] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd131df-19ca-40d6-9802-97afb861694d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.000198] env[61972]: DEBUG nova.compute.provider_tree [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.062313] env[61972]: DEBUG nova.network.neutron [req-7e7a334f-83e3-4157-9238-b668846b0dd7 req-aa90cbc1-a394-4a40-85ab-8cf55dbd71c3 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.235515] env[61972]: DEBUG oslo_vmware.api [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Task: {'id': task-1389622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.388729} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.235787] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.235977] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1023.236177] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1023.236357] env[61972]: INFO nova.compute.manager [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1023.236609] env[61972]: DEBUG oslo.service.loopingcall [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.236801] env[61972]: DEBUG nova.compute.manager [-] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1023.236899] env[61972]: DEBUG nova.network.neutron [-] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1023.509498] env[61972]: DEBUG nova.scheduler.client.report [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1023.564642] env[61972]: DEBUG oslo_concurrency.lockutils [req-7e7a334f-83e3-4157-9238-b668846b0dd7 req-aa90cbc1-a394-4a40-85ab-8cf55dbd71c3 service nova] Releasing lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.565048] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.565378] env[61972]: DEBUG nova.network.neutron [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1023.837835] env[61972]: DEBUG nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Start spawning the instance on the hypervisor. 
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1023.864245] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1023.864539] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1023.864701] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.864890] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1023.865034] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.865189] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1023.865397] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1023.865560] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1023.865726] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 
tempest-ServersTestJSON-1214410209-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1023.865887] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1023.866069] env[61972]: DEBUG nova.virt.hardware [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1023.866913] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4bd850-a85c-4617-bd92-96a1743c4b1c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.874580] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400d4839-74d8-4e24-a821-274ceb026653 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.965711] env[61972]: DEBUG nova.network.neutron [-] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.014135] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.200s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.014750] env[61972]: DEBUG nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1024.099898] env[61972]: DEBUG nova.network.neutron [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1024.248668] env[61972]: DEBUG nova.network.neutron [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance_info_cache with network_info: [{"id": "2cc52359-688b-48a0-8436-a1d5cfd37738", "address": "fa:16:3e:ed:78:92", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc52359-68", "ovs_interfaceid": "2cc52359-688b-48a0-8436-a1d5cfd37738", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.331792] env[61972]: DEBUG nova.compute.manager [req-e131e4ce-c833-43c6-ab71-68887d2b85dd req-6165eb3f-dd51-41bd-8af6-d6eb6a4fbbc3 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Received event network-vif-plugged-bbcf3c01-de4f-46b2-af22-eb28c8a3bcde {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1024.332125] env[61972]: DEBUG oslo_concurrency.lockutils [req-e131e4ce-c833-43c6-ab71-68887d2b85dd req-6165eb3f-dd51-41bd-8af6-d6eb6a4fbbc3 service nova] Acquiring lock "fe623f2c-1fd9-43f0-be96-29bb252e0171-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.332426] env[61972]: DEBUG oslo_concurrency.lockutils [req-e131e4ce-c833-43c6-ab71-68887d2b85dd req-6165eb3f-dd51-41bd-8af6-d6eb6a4fbbc3 service nova] Lock "fe623f2c-1fd9-43f0-be96-29bb252e0171-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.332666] env[61972]: DEBUG oslo_concurrency.lockutils [req-e131e4ce-c833-43c6-ab71-68887d2b85dd req-6165eb3f-dd51-41bd-8af6-d6eb6a4fbbc3 service nova] Lock "fe623f2c-1fd9-43f0-be96-29bb252e0171-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.332901] env[61972]: DEBUG nova.compute.manager [req-e131e4ce-c833-43c6-ab71-68887d2b85dd req-6165eb3f-dd51-41bd-8af6-d6eb6a4fbbc3 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] No waiting events found dispatching network-vif-plugged-bbcf3c01-de4f-46b2-af22-eb28c8a3bcde {{(pid=61972) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1024.333140] env[61972]: WARNING nova.compute.manager [req-e131e4ce-c833-43c6-ab71-68887d2b85dd req-6165eb3f-dd51-41bd-8af6-d6eb6a4fbbc3 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Received unexpected event network-vif-plugged-bbcf3c01-de4f-46b2-af22-eb28c8a3bcde for instance with vm_state building and task_state spawning. [ 1024.416324] env[61972]: DEBUG nova.network.neutron [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Successfully updated port: bbcf3c01-de4f-46b2-af22-eb28c8a3bcde {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.433065] env[61972]: DEBUG nova.compute.manager [req-df271719-0356-4fe6-ba62-31e2c71b3aeb req-11fec3ba-a3e5-45f0-9d11-312575ae8bf9 service nova] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Received event network-vif-deleted-1c8136d3-adb9-485c-b04a-26fb85df1f6d {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1024.468188] env[61972]: INFO nova.compute.manager [-] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Took 1.23 seconds to deallocate network for instance. [ 1024.519953] env[61972]: DEBUG nova.compute.utils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1024.521305] env[61972]: DEBUG nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1024.521481] env[61972]: DEBUG nova.network.neutron [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1024.557818] env[61972]: DEBUG nova.policy [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47ebbe5ddb8b41bbb1a54cf191aef61a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '651d8f34661542219f5451bce866ec02', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 1024.750999] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.751349] env[61972]: DEBUG nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Instance network_info: |[{"id": "2cc52359-688b-48a0-8436-a1d5cfd37738", "address": "fa:16:3e:ed:78:92", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc52359-68", "ovs_interfaceid": "2cc52359-688b-48a0-8436-a1d5cfd37738", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1024.751796] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:78:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e99c063c-0cb7-4db6-b077-114166cfe889', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cc52359-688b-48a0-8436-a1d5cfd37738', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1024.759214] env[61972]: DEBUG oslo.service.loopingcall [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1024.759425] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1024.759663] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-312da59b-bb9a-4878-bc26-108b9c439fff {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.778894] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1024.778894] env[61972]: value = "task-1389623" [ 1024.778894] env[61972]: _type = "Task" [ 1024.778894] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.788133] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389623, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.817162] env[61972]: DEBUG nova.network.neutron [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Successfully created port: 984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1024.919079] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "refresh_cache-fe623f2c-1fd9-43f0-be96-29bb252e0171" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.919393] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "refresh_cache-fe623f2c-1fd9-43f0-be96-29bb252e0171" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.919685] env[61972]: DEBUG nova.network.neutron [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1024.974963] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.975358] env[61972]: DEBUG 
oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.975629] env[61972]: DEBUG nova.objects.instance [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lazy-loading 'resources' on Instance uuid 86d022e2-bd02-45cd-a9dd-362e912dd8e1 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.025445] env[61972]: DEBUG nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1025.291357] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389623, 'name': CreateVM_Task, 'duration_secs': 0.309427} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.291484] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1025.292199] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.292430] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.292762] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1025.293317] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f01af2fd-f651-4116-999c-8d272c5133b0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.297907] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1025.297907] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d0fc8d-5dac-e349-b826-ffd317d06ac7" [ 1025.297907] env[61972]: _type = "Task" [ 1025.297907] env[61972]: } to complete. 
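[editor's note] The recurring "Waiting for the task" / "progress is 0%" / "completed successfully" triplets above come from polling the vCenter task object until it reaches a terminal state. A minimal sketch of such a poll loop, with a hypothetical get_task_info helper standing in for the real SOAP call (this is only the shape of the behaviour, not oslo.vmware's implementation):

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it succeeds, fails, or times out.

        get_task_info(task_id) is assumed to return an object with
        .state in {'queued', 'running', 'success', 'error'},
        .progress (0-100) and .error (message or None).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            print("Task %s progress is %s%%." % (task_id, info.progress or 0))
            time.sleep(interval)
        raise TaskFailed("timed out waiting for %s" % task_id)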
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.305518] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d0fc8d-5dac-e349-b826-ffd317d06ac7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.451660] env[61972]: DEBUG nova.network.neutron [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.584171] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cb8a4f-6938-4cd6-a75c-604c1f36c5b0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.591315] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f467cd-2d62-4fb7-95a0-5eddf93f79c5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.594973] env[61972]: DEBUG nova.network.neutron [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Updating instance_info_cache with network_info: [{"id": "bbcf3c01-de4f-46b2-af22-eb28c8a3bcde", "address": "fa:16:3e:f3:48:03", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcf3c01-de", "ovs_interfaceid": "bbcf3c01-de4f-46b2-af22-eb28c8a3bcde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.622586] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd481c12-79d2-450f-9673-5703a54f1094 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.630165] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fc4abb-67c0-42d3-bd19-07eee7f97601 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1025.643488] env[61972]: DEBUG nova.compute.provider_tree [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.807196] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d0fc8d-5dac-e349-b826-ffd317d06ac7, 'name': SearchDatastore_Task, 'duration_secs': 0.00889} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.807496] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.807731] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1025.807962] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.808125] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.808328] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1025.808591] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83dbc3b2-e8dd-48ac-b6da-b957eb910640 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.815856] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1025.816028] env[61972]: DEBUG nova.virt.vmwareapi.vmops 
[None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1025.816677] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcadd59d-fdfe-4b3a-a1d2-944b10c01833 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.821362] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1025.821362] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf49ae-1bc0-75fc-dfb1-a3f9f0d38d9a" [ 1025.821362] env[61972]: _type = "Task" [ 1025.821362] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.828114] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf49ae-1bc0-75fc-dfb1-a3f9f0d38d9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.035302] env[61972]: DEBUG nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Start spawning the instance on the hypervisor. 
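[editor's note] The sequence above — acquire a lock named after the cached VMDK path, search the datastore, create the devstack-image-cache_base folder if missing — is the classic "fetch the image once, reuse it for every later boot" cache. A simplified local-filesystem analogue of that flow, with a hypothetical fetch callable standing in for the Glance-to-datastore copy:

    import os
    import threading

    _cache_locks = {}
    _cache_guard = threading.Lock()

    def _lock_for(path):
        # One lock per cached item, so concurrent spawns of the same image serialize.
        with _cache_guard:
            return _cache_locks.setdefault(path, threading.Lock())

    def ensure_cached_image(cache_dir, image_id, fetch):
        """Return the cached image path, materializing it only once.

        fetch(image_id, dest_path) is an assumed callable that downloads or
        copies the image into place.
        """
        cached = os.path.join(cache_dir, image_id, image_id + ".vmdk")
        with _lock_for(cached):
            if not os.path.exists(cached):                       # SearchDatastore_Task analogue
                os.makedirs(os.path.dirname(cached), exist_ok=True)  # MakeDirectory analogue
                fetch(image_id, cached)
        return cached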
{{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1026.064183] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1026.064481] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1026.064642] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1026.064822] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1026.064969] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1026.065132] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1026.065345] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1026.065513] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1026.065682] env[61972]: DEBUG 
nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1026.065844] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1026.066022] env[61972]: DEBUG nova.virt.hardware [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1026.066902] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929a0541-74b5-4118-8288-3a63a6b17880 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.074714] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ede3694-ec25-474a-85c2-3e42f1faf74e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.097883] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "refresh_cache-fe623f2c-1fd9-43f0-be96-29bb252e0171" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.098184] env[61972]: DEBUG nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Instance network_info: |[{"id": "bbcf3c01-de4f-46b2-af22-eb28c8a3bcde", "address": "fa:16:3e:f3:48:03", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcf3c01-de", "ovs_interfaceid": "bbcf3c01-de4f-46b2-af22-eb28c8a3bcde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1026.098563] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 
tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:48:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbcf3c01-de4f-46b2-af22-eb28c8a3bcde', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.105996] env[61972]: DEBUG oslo.service.loopingcall [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.106209] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1026.106417] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3dabd237-04ba-4cd9-abb2-243b625c54a7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.125594] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.125594] env[61972]: value = "task-1389624" [ 1026.125594] env[61972]: _type = "Task" [ 1026.125594] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.132946] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389624, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.145839] env[61972]: DEBUG nova.scheduler.client.report [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1026.334185] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bf49ae-1bc0-75fc-dfb1-a3f9f0d38d9a, 'name': SearchDatastore_Task, 'duration_secs': 0.008628} completed successfully. 
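[editor's note] The CPU topology lines a little further above show the selection for the m1.nano flavor: neither flavor nor image sets limits or preferences (all 0:0:0), the defaults cap sockets/cores/threads at 65536, and with a single vCPU the only topology whose product equals the vCPU count is 1 socket x 1 core x 1 thread. A small sketch of that enumeration (a simplification of what nova.virt.hardware does, not its actual code):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Return (sockets, cores, threads) triples whose product equals vcpus."""
        topologies = []
        for s in range(1, min(max_sockets, vcpus) + 1):
            if vcpus % s:
                continue
            for c in range(1, min(max_cores, vcpus // s) + 1):
                if (vcpus // s) % c:
                    continue
                t = vcpus // (s * c)
                if t <= max_threads:
                    topologies.append((s, c, t))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology in the log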
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.335056] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1905675a-2364-441e-9663-1d8aa4b13906 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.340117] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1026.340117] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ecbca3-3356-b6d3-8e22-c62e83ce14b9" [ 1026.340117] env[61972]: _type = "Task" [ 1026.340117] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.347734] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ecbca3-3356-b6d3-8e22-c62e83ce14b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.361754] env[61972]: DEBUG nova.network.neutron [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Successfully updated port: 984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1026.371034] env[61972]: DEBUG nova.compute.manager [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Received event network-changed-bbcf3c01-de4f-46b2-af22-eb28c8a3bcde {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1026.371242] env[61972]: DEBUG nova.compute.manager [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Refreshing instance network info cache due to event network-changed-bbcf3c01-de4f-46b2-af22-eb28c8a3bcde. 
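[editor's note] The inventory reported above for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 determines the capacity Placement will hand out: usable capacity is (total - reserved) * allocation_ratio, each individual allocation must stay within min_unit/max_unit and be a multiple of step_size. With the numbers in the log that gives 192 VCPU, 196078 MB of RAM and 400 GB of disk. A quick arithmetic check:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%s: %.0f usable, at most %d per allocation" % (rc, usable, inv['max_unit']))
    # VCPU: 192 usable, at most 16 per allocation
    # MEMORY_MB: 196078 usable, at most 65530 per allocation
    # DISK_GB: 400 usable, at most 175 per allocation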
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1026.371501] env[61972]: DEBUG oslo_concurrency.lockutils [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] Acquiring lock "refresh_cache-fe623f2c-1fd9-43f0-be96-29bb252e0171" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.371679] env[61972]: DEBUG oslo_concurrency.lockutils [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] Acquired lock "refresh_cache-fe623f2c-1fd9-43f0-be96-29bb252e0171" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.371897] env[61972]: DEBUG nova.network.neutron [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Refreshing network info cache for port bbcf3c01-de4f-46b2-af22-eb28c8a3bcde {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.636687] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389624, 'name': CreateVM_Task, 'duration_secs': 0.296876} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.636907] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.637613] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.637788] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.638157] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1026.638430] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f515cbd-1bd1-4501-af3e-573237a4c368 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.643027] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1026.643027] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bd92cc-0d84-9b44-7971-729b29ffe85a" [ 1026.643027] env[61972]: _type = "Task" [ 1026.643027] env[61972]: } to complete. 
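[editor's note] The Acquiring/Acquired lock "refresh_cache-<uuid>" lines above show how the refresh triggered by the network-changed event is serialized against any other thread reading or rebuilding the same instance's network info cache. A minimal sketch of that per-instance critical section using oslo.concurrency (the lock-name convention mirrors the log; the refresh body is a placeholder):

    from oslo_concurrency import lockutils

    def refresh_instance_nw_cache(instance_uuid, refresh_fn):
        """Serialize network-info cache refreshes per instance.

        refresh_fn is an assumed callable that re-queries Neutron and writes
        the fresh network_info back into the instance info cache.
        """
        # Same naming convention as the "refresh_cache-<uuid>" lock in the log.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            return refresh_fn(instance_uuid)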
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.651263] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.652991] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bd92cc-0d84-9b44-7971-729b29ffe85a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.673302] env[61972]: INFO nova.scheduler.client.report [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Deleted allocations for instance 86d022e2-bd02-45cd-a9dd-362e912dd8e1 [ 1026.851036] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52ecbca3-3356-b6d3-8e22-c62e83ce14b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009056} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.851340] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.851614] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 66d0dc08-e8e7-4bf5-884a-67f65e8e109d/66d0dc08-e8e7-4bf5-884a-67f65e8e109d.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1026.851863] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce88555c-a95b-4536-b90d-3149b73d0b1b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.857630] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1026.857630] env[61972]: value = "task-1389625" [ 1026.857630] env[61972]: _type = "Task" [ 1026.857630] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.864722] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.864857] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.865010] env[61972]: DEBUG nova.network.neutron [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1026.866065] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389625, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.080729] env[61972]: DEBUG nova.network.neutron [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Updated VIF entry in instance network info cache for port bbcf3c01-de4f-46b2-af22-eb28c8a3bcde. 
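[editor's note] The CopyVirtualDisk_Task above copies the cached image VMDK into the new instance's own folder; both ends of the copy are plain datastore paths of the form "[datastore] folder/file.vmdk". A tiny helper that reproduces the two paths seen in the log (the helper name is illustrative):

    def ds_path(datastore, *parts):
        """Format a vSphere datastore path like '[datastore2] a/b.vmdk'."""
        return "[%s] %s" % (datastore, "/".join(parts))

    image_id = "79227ea9-188c-426d-a7d8-cb14b658f493"
    instance_uuid = "66d0dc08-e8e7-4bf5-884a-67f65e8e109d"

    source = ds_path("datastore2", "devstack-image-cache_base", image_id, image_id + ".vmdk")
    target = ds_path("datastore2", instance_uuid, instance_uuid + ".vmdk")
    print(source)   # [datastore2] devstack-image-cache_base/79227ea9-.../79227ea9-....vmdk
    print(target)   # [datastore2] 66d0dc08-.../66d0dc08-....vmdk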
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.081200] env[61972]: DEBUG nova.network.neutron [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Updating instance_info_cache with network_info: [{"id": "bbcf3c01-de4f-46b2-af22-eb28c8a3bcde", "address": "fa:16:3e:f3:48:03", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcf3c01-de", "ovs_interfaceid": "bbcf3c01-de4f-46b2-af22-eb28c8a3bcde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.153771] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52bd92cc-0d84-9b44-7971-729b29ffe85a, 'name': SearchDatastore_Task, 'duration_secs': 0.008272} completed successfully. 
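[editor's note] The instance_info_cache payload above is a list of VIF dicts; everything the driver later needs (MAC, fixed IPs, MTU, the NSX logical switch and segmentation ID) is nested inside it. A short sketch that pulls those fields out of one cached entry, using the structure shown in the log (the entry below is abbreviated, variable names are mine):

    vif = {
        "id": "bbcf3c01-de4f-46b2-af22-eb28c8a3bcde",
        "address": "fa:16:3e:f3:48:03",
        "network": {
            "id": "0ee63c2f-6734-4b12-88f2-59679c697d2d",
            "subnets": [{"cidr": "192.168.128.0/28",
                         "ips": [{"address": "192.168.128.12", "type": "fixed"}]}],
            "meta": {"mtu": 8950},
        },
        "details": {"nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0",
                    "segmentation_id": 661},
        "devname": "tapbbcf3c01-de",
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["address"], fixed_ips, vif["network"]["meta"]["mtu"],
          vif["details"]["segmentation_id"])
    # fa:16:3e:f3:48:03 ['192.168.128.12'] 8950 661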
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.154183] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.154512] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.154783] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.154978] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.155190] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.155465] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e754470-0bc5-4c66-83b2-9d57db3ce57d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.170312] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.170473] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.171338] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52dbe6a1-1012-40a3-b033-70ebaf46523e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.177242] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1027.177242] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5230a2de-778c-1538-4062-f23d191b3eac" [ 1027.177242] env[61972]: _type = "Task" [ 1027.177242] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.183750] env[61972]: DEBUG oslo_concurrency.lockutils [None req-fb2c6b00-c795-434e-bd89-73d80aab4066 tempest-ServerDiskConfigTestJSON-661301000 tempest-ServerDiskConfigTestJSON-661301000-project-member] Lock "86d022e2-bd02-45cd-a9dd-362e912dd8e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.566s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.188389] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5230a2de-778c-1538-4062-f23d191b3eac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.368398] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473045} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.368689] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 66d0dc08-e8e7-4bf5-884a-67f65e8e109d/66d0dc08-e8e7-4bf5-884a-67f65e8e109d.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1027.368905] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1027.369533] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb347177-ed3d-475e-a7aa-235a389a9678 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.376251] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1027.376251] env[61972]: value = "task-1389626" [ 1027.376251] env[61972]: _type = "Task" [ 1027.376251] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.384677] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.396573] env[61972]: DEBUG nova.network.neutron [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Instance cache missing network info. 
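[editor's note] "Extending root virtual disk to 1048576" above is the flavor's root disk expressed in KiB: the m1.nano flavor in this run has root_gb=1, and 1 GiB is 1024 * 1024 = 1048576 KiB, so the sparse cirros image is grown to the flavor size after the copy. Quick check:

    GIB_IN_KIB = 1024 * 1024

    def root_disk_kib(root_gb):
        """Convert a flavor's root_gb to the KiB value used when extending the disk."""
        return root_gb * GIB_IN_KIB

    print(root_disk_kib(1))   # 1048576 -- matches "Extending root virtual disk to 1048576"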
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1027.566579] env[61972]: DEBUG nova.network.neutron [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Updating instance_info_cache with network_info: [{"id": "984fa809-bf48-4083-bd47-872fafdec46a", "address": "fa:16:3e:04:b4:67", "network": {"id": "c74365c9-d7d6-401a-a7fb-98f833ef744e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-480970775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651d8f34661542219f5451bce866ec02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984fa809-bf", "ovs_interfaceid": "984fa809-bf48-4083-bd47-872fafdec46a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.584802] env[61972]: DEBUG oslo_concurrency.lockutils [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] Releasing lock "refresh_cache-fe623f2c-1fd9-43f0-be96-29bb252e0171" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.584802] env[61972]: DEBUG nova.compute.manager [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Received event network-vif-plugged-984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1027.584802] env[61972]: DEBUG oslo_concurrency.lockutils [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] Acquiring lock "63821242-c34e-4ed1-8ed3-f7f445ffe322-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.584802] env[61972]: DEBUG oslo_concurrency.lockutils [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.585042] env[61972]: DEBUG oslo_concurrency.lockutils [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.585130] env[61972]: DEBUG nova.compute.manager [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] No waiting events found dispatching network-vif-plugged-984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1027.585356] env[61972]: WARNING nova.compute.manager [req-61a73faf-267c-4f56-9632-813a27ac7d50 req-5c5b2605-ee15-450d-913b-b34c15f250b4 service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Received unexpected event network-vif-plugged-984fa809-bf48-4083-bd47-872fafdec46a for instance with vm_state building and task_state spawning. [ 1027.687471] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5230a2de-778c-1538-4062-f23d191b3eac, 'name': SearchDatastore_Task, 'duration_secs': 0.053483} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.688469] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e48914a8-d0b1-441b-9ef8-c9a74b0783d5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.693811] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1027.693811] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52965cea-311f-7254-b31f-e5ed4037abe6" [ 1027.693811] env[61972]: _type = "Task" [ 1027.693811] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.700962] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52965cea-311f-7254-b31f-e5ed4037abe6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.885251] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065588} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.885541] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.886347] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f1b0c9-f362-45e7-b954-0c9e275dcb0b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.909652] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 66d0dc08-e8e7-4bf5-884a-67f65e8e109d/66d0dc08-e8e7-4bf5-884a-67f65e8e109d.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.909652] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82bf22de-853f-4464-8746-10fcf2eb1fb8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.930694] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1027.930694] env[61972]: value = "task-1389627" [ 1027.930694] env[61972]: _type = "Task" [ 1027.930694] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.942971] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389627, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.070545] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.070545] env[61972]: DEBUG nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Instance network_info: |[{"id": "984fa809-bf48-4083-bd47-872fafdec46a", "address": "fa:16:3e:04:b4:67", "network": {"id": "c74365c9-d7d6-401a-a7fb-98f833ef744e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-480970775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651d8f34661542219f5451bce866ec02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984fa809-bf", "ovs_interfaceid": "984fa809-bf48-4083-bd47-872fafdec46a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1028.070717] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:b4:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5fe645c-e088-401e-ab53-4ae2981dea72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '984fa809-bf48-4083-bd47-872fafdec46a', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.079827] env[61972]: DEBUG oslo.service.loopingcall [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.080436] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1028.080761] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49a48822-39dd-473c-83c5-f9e42e37e3d1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.096741] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.097008] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.097220] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.097404] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.097576] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.099923] env[61972]: INFO nova.compute.manager [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Terminating instance [ 1028.105226] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.105226] env[61972]: value = "task-1389628" [ 1028.105226] env[61972]: _type = "Task" [ 1028.105226] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.116495] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389628, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.204452] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52965cea-311f-7254-b31f-e5ed4037abe6, 'name': SearchDatastore_Task, 'duration_secs': 0.054054} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.204784] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.205106] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] fe623f2c-1fd9-43f0-be96-29bb252e0171/fe623f2c-1fd9-43f0-be96-29bb252e0171.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.205392] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dadf1f42-1162-4a93-8b9d-46fe0688f88a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.212825] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1028.212825] env[61972]: value = "task-1389629" [ 1028.212825] env[61972]: _type = "Task" [ 1028.212825] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.221919] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389629, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.310308] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.312400] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.400228] env[61972]: DEBUG nova.compute.manager [req-be7d8020-48b2-4a4c-b909-fb599b805ba0 req-a048b9b1-0da4-4049-8139-055c11a73c6b service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Received event network-changed-984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1028.400228] env[61972]: DEBUG nova.compute.manager [req-be7d8020-48b2-4a4c-b909-fb599b805ba0 req-a048b9b1-0da4-4049-8139-055c11a73c6b service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Refreshing instance network info cache due to event network-changed-984fa809-bf48-4083-bd47-872fafdec46a. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1028.401072] env[61972]: DEBUG oslo_concurrency.lockutils [req-be7d8020-48b2-4a4c-b909-fb599b805ba0 req-a048b9b1-0da4-4049-8139-055c11a73c6b service nova] Acquiring lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.401441] env[61972]: DEBUG oslo_concurrency.lockutils [req-be7d8020-48b2-4a4c-b909-fb599b805ba0 req-a048b9b1-0da4-4049-8139-055c11a73c6b service nova] Acquired lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.401764] env[61972]: DEBUG nova.network.neutron [req-be7d8020-48b2-4a4c-b909-fb599b805ba0 req-a048b9b1-0da4-4049-8139-055c11a73c6b service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Refreshing network info cache for port 984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.442085] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389627, 'name': ReconfigVM_Task, 'duration_secs': 0.323793} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.442739] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 66d0dc08-e8e7-4bf5-884a-67f65e8e109d/66d0dc08-e8e7-4bf5-884a-67f65e8e109d.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.443713] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c504c748-b7b1-4559-b0c5-1529798f9d27 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.451700] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1028.451700] env[61972]: value = "task-1389630" [ 1028.451700] env[61972]: _type = "Task" [ 1028.451700] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.463535] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389630, 'name': Rename_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.604930] env[61972]: DEBUG nova.compute.manager [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1028.605185] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.606270] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed041995-edbb-4e83-9d60-a418cde20785 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.618472] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389628, 'name': CreateVM_Task, 'duration_secs': 0.414284} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.620633] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.621092] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.622477] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.622726] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.623090] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1028.623502] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e0ba13b-0143-4efe-8eeb-2cb7d4ac7e64 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.625727] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9d163a9-a98b-4fbb-9a76-133333cb3d54 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.634723] env[61972]: DEBUG oslo_vmware.api [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 1028.634723] env[61972]: value = "task-1389631" [ 1028.634723] env[61972]: _type = "Task" [ 1028.634723] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.635078] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1028.635078] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]525b5f9e-0a6c-7901-7862-85820a1c6cc4" [ 1028.635078] env[61972]: _type = "Task" [ 1028.635078] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.648858] env[61972]: DEBUG oslo_vmware.api [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389631, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.652613] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525b5f9e-0a6c-7901-7862-85820a1c6cc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.723797] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389629, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473697} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.724055] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] fe623f2c-1fd9-43f0-be96-29bb252e0171/fe623f2c-1fd9-43f0-be96-29bb252e0171.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1028.724331] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1028.724542] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26239d32-2f85-4b39-8438-d06242337aa3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.731379] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1028.731379] env[61972]: value = "task-1389632" [ 1028.731379] env[61972]: _type = "Task" [ 1028.731379] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.738033] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389632, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.814118] env[61972]: INFO nova.compute.manager [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Detaching volume 49e34489-2d91-47b5-b285-958e3c1e5401 [ 1028.858232] env[61972]: INFO nova.virt.block_device [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Attempting to driver detach volume 49e34489-2d91-47b5-b285-958e3c1e5401 from mountpoint /dev/sdb [ 1028.858409] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Volume detach. Driver type: vmdk {{(pid=61972) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1028.858600] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294912', 'volume_id': '49e34489-2d91-47b5-b285-958e3c1e5401', 'name': 'volume-49e34489-2d91-47b5-b285-958e3c1e5401', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd2864436-05a3-421f-98fd-41df925727c6', 'attached_at': '', 'detached_at': '', 'volume_id': '49e34489-2d91-47b5-b285-958e3c1e5401', 'serial': '49e34489-2d91-47b5-b285-958e3c1e5401'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1028.859508] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114ee63f-13af-46dc-a305-2db49b1c0c3b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.881089] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b599586-3b9f-4745-bd7e-0a1acf2c368a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.887710] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72769e7e-565f-4274-99d4-bf84b43f723a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.908856] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cdaeae-e633-4dea-9c6a-d898c4ebcb3a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.923348] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] The volume has not been displaced from its original location: [datastore1] volume-49e34489-2d91-47b5-b285-958e3c1e5401/volume-49e34489-2d91-47b5-b285-958e3c1e5401.vmdk. No consolidation needed. 
{{(pid=61972) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1028.928731] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1028.929035] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6ea5616-7ce8-41d0-927e-a462b2f2a40a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.948336] env[61972]: DEBUG oslo_vmware.api [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1028.948336] env[61972]: value = "task-1389633" [ 1028.948336] env[61972]: _type = "Task" [ 1028.948336] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.954061] env[61972]: DEBUG oslo_vmware.api [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389633, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.962014] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389630, 'name': Rename_Task, 'duration_secs': 0.228329} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.964287] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.964287] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e78c556e-3834-45c0-810e-efee6a9642a4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.971021] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1028.971021] env[61972]: value = "task-1389634" [ 1028.971021] env[61972]: _type = "Task" [ 1028.971021] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.978408] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389634, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.151021] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]525b5f9e-0a6c-7901-7862-85820a1c6cc4, 'name': SearchDatastore_Task, 'duration_secs': 0.042231} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.154233] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.154548] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.154812] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.154957] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.155143] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.155408] env[61972]: DEBUG oslo_vmware.api [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389631, 'name': PowerOffVM_Task, 'duration_secs': 0.192722} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.157821] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0680642f-1049-49f3-95c7-835a434c73a8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.160018] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.160018] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.160018] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3a6f909-052d-4675-b2f1-1a4a991b9df6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.169932] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.170123] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.170827] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24e19156-6a5f-4bc1-9755-29b20e431c72 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.176802] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1029.176802] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fa4da0-7f17-6c34-998c-aada5f660b6d" [ 1029.176802] env[61972]: _type = "Task" [ 1029.176802] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.184550] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fa4da0-7f17-6c34-998c-aada5f660b6d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.226634] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.226879] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.226962] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleting the datastore file [datastore1] 56e21cf4-4dbc-4f72-97c0-082dd689c046 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.227339] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae74413a-7a0b-4704-ae28-3d63bfbe3f2f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.230621] env[61972]: DEBUG nova.network.neutron [req-be7d8020-48b2-4a4c-b909-fb599b805ba0 req-a048b9b1-0da4-4049-8139-055c11a73c6b service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Updated VIF entry in instance network info cache for port 984fa809-bf48-4083-bd47-872fafdec46a. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1029.230942] env[61972]: DEBUG nova.network.neutron [req-be7d8020-48b2-4a4c-b909-fb599b805ba0 req-a048b9b1-0da4-4049-8139-055c11a73c6b service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Updating instance_info_cache with network_info: [{"id": "984fa809-bf48-4083-bd47-872fafdec46a", "address": "fa:16:3e:04:b4:67", "network": {"id": "c74365c9-d7d6-401a-a7fb-98f833ef744e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-480970775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651d8f34661542219f5451bce866ec02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984fa809-bf", "ovs_interfaceid": "984fa809-bf48-4083-bd47-872fafdec46a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.236725] env[61972]: DEBUG oslo_vmware.api [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for the task: (returnval){ [ 1029.236725] env[61972]: value = "task-1389636" [ 1029.236725] env[61972]: _type = "Task" [ 1029.236725] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.242925] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06445} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.244091] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1029.244852] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19525ecf-1421-4158-ab77-e1374eca46c0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.250311] env[61972]: DEBUG oslo_vmware.api [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389636, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.270317] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] fe623f2c-1fd9-43f0-be96-29bb252e0171/fe623f2c-1fd9-43f0-be96-29bb252e0171.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.270899] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fcb68e5d-5bf2-4af4-af38-691985498f9f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.291033] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1029.291033] env[61972]: value = "task-1389637" [ 1029.291033] env[61972]: _type = "Task" [ 1029.291033] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.299261] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389637, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.458587] env[61972]: DEBUG oslo_vmware.api [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389633, 'name': ReconfigVM_Task, 'duration_secs': 0.239709} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.458910] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1029.463656] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61be17af-aa3d-43ce-ab80-a56a9e61e5d9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.483795] env[61972]: DEBUG oslo_vmware.api [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389634, 'name': PowerOnVM_Task, 'duration_secs': 0.510053} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.485129] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.485572] env[61972]: INFO nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Took 7.83 seconds to spawn the instance on the hypervisor. [ 1029.485572] env[61972]: DEBUG nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1029.485901] env[61972]: DEBUG oslo_vmware.api [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1029.485901] env[61972]: value = "task-1389638" [ 1029.485901] env[61972]: _type = "Task" [ 1029.485901] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.486706] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6d1438-266f-4ffa-91f9-ed1acf278f35 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.501152] env[61972]: DEBUG oslo_vmware.api [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389638, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.687169] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fa4da0-7f17-6c34-998c-aada5f660b6d, 'name': SearchDatastore_Task, 'duration_secs': 0.008457} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.688469] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-082f65e6-63c3-4ec3-a432-25b2cafddde1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.696862] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1029.696862] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]521bdaa0-82a0-16de-8781-72cd2c3b5a84" [ 1029.696862] env[61972]: _type = "Task" [ 1029.696862] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.703140] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]521bdaa0-82a0-16de-8781-72cd2c3b5a84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.737947] env[61972]: DEBUG oslo_concurrency.lockutils [req-be7d8020-48b2-4a4c-b909-fb599b805ba0 req-a048b9b1-0da4-4049-8139-055c11a73c6b service nova] Releasing lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.746525] env[61972]: DEBUG oslo_vmware.api [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Task: {'id': task-1389636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143478} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.746782] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.746967] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.747163] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.747340] env[61972]: INFO nova.compute.manager [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1029.747586] env[61972]: DEBUG oslo.service.loopingcall [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1029.747776] env[61972]: DEBUG nova.compute.manager [-] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1029.748073] env[61972]: DEBUG nova.network.neutron [-] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.800834] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389637, 'name': ReconfigVM_Task, 'duration_secs': 0.276985} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.801151] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Reconfigured VM instance instance-00000065 to attach disk [datastore2] fe623f2c-1fd9-43f0-be96-29bb252e0171/fe623f2c-1fd9-43f0-be96-29bb252e0171.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.801798] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75f718fc-35b0-47ff-a90c-8b45adf33c4a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.808194] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1029.808194] env[61972]: value = "task-1389639" [ 1029.808194] env[61972]: _type = "Task" [ 1029.808194] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.817854] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389639, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.998876] env[61972]: DEBUG oslo_vmware.api [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389638, 'name': ReconfigVM_Task, 'duration_secs': 0.139571} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.999687] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294912', 'volume_id': '49e34489-2d91-47b5-b285-958e3c1e5401', 'name': 'volume-49e34489-2d91-47b5-b285-958e3c1e5401', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd2864436-05a3-421f-98fd-41df925727c6', 'attached_at': '', 'detached_at': '', 'volume_id': '49e34489-2d91-47b5-b285-958e3c1e5401', 'serial': '49e34489-2d91-47b5-b285-958e3c1e5401'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1030.011102] env[61972]: INFO nova.compute.manager [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Took 14.47 seconds to build instance. [ 1030.209029] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]521bdaa0-82a0-16de-8781-72cd2c3b5a84, 'name': SearchDatastore_Task, 'duration_secs': 0.011249} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.209029] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.209029] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 63821242-c34e-4ed1-8ed3-f7f445ffe322/63821242-c34e-4ed1-8ed3-f7f445ffe322.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.209029] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b5dd3cd-ec7b-423a-b4b0-3b02deaa9419 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.214715] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1030.214715] env[61972]: value = "task-1389640" [ 1030.214715] env[61972]: _type = "Task" [ 1030.214715] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.223261] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389640, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.318039] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389639, 'name': Rename_Task, 'duration_secs': 0.146697} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.318223] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1030.318470] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-495ddc73-9125-45c7-8860-fd34a9ea669e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.323607] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1030.323607] env[61972]: value = "task-1389641" [ 1030.323607] env[61972]: _type = "Task" [ 1030.323607] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.331186] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389641, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.438538] env[61972]: DEBUG nova.compute.manager [req-3385f1aa-19bc-4b0b-8c7e-99b9a8f49879 req-de09e427-2566-402e-b600-29e6ec09648c service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Received event network-vif-deleted-89e228e1-2aac-4e05-98ee-5c29dd44f55b {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1030.438538] env[61972]: INFO nova.compute.manager [req-3385f1aa-19bc-4b0b-8c7e-99b9a8f49879 req-de09e427-2566-402e-b600-29e6ec09648c service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Neutron deleted interface 89e228e1-2aac-4e05-98ee-5c29dd44f55b; detaching it from the instance and deleting it from the info cache [ 1030.438623] env[61972]: DEBUG nova.network.neutron [req-3385f1aa-19bc-4b0b-8c7e-99b9a8f49879 req-de09e427-2566-402e-b600-29e6ec09648c service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.514020] env[61972]: DEBUG oslo_concurrency.lockutils [None req-e7880b06-aaf7-4bdb-894d-41fbe5ae1906 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.980s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.559426] env[61972]: DEBUG nova.objects.instance [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.725662] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389640, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480741} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.725945] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 63821242-c34e-4ed1-8ed3-f7f445ffe322/63821242-c34e-4ed1-8ed3-f7f445ffe322.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.726202] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.726489] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91f48288-3346-4486-8f3f-ad09b66f68a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.733093] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1030.733093] env[61972]: value = "task-1389642" [ 1030.733093] env[61972]: _type = "Task" [ 1030.733093] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.740978] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389642, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.804633] env[61972]: DEBUG nova.network.neutron [-] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.833299] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389641, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.941679] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da40320d-cdb9-408e-8280-f52605d3b69c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.951475] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8567fa-a790-4aed-8b5e-978da04cbe06 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.981746] env[61972]: DEBUG nova.compute.manager [req-3385f1aa-19bc-4b0b-8c7e-99b9a8f49879 req-de09e427-2566-402e-b600-29e6ec09648c service nova] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Detach interface failed, port_id=89e228e1-2aac-4e05-98ee-5c29dd44f55b, reason: Instance 56e21cf4-4dbc-4f72-97c0-082dd689c046 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1031.242793] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389642, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081374} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.243190] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.243953] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d63f2d1-08ef-4e6f-9bcb-02051808b68f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.266948] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 63821242-c34e-4ed1-8ed3-f7f445ffe322/63821242-c34e-4ed1-8ed3-f7f445ffe322.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.267212] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae625050-81cf-426e-93f4-f6e72f0bcdac {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.286875] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1031.286875] env[61972]: value = "task-1389643" [ 1031.286875] env[61972]: _type = "Task" [ 1031.286875] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.294939] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389643, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.309494] env[61972]: INFO nova.compute.manager [-] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Took 1.56 seconds to deallocate network for instance. [ 1031.333710] env[61972]: DEBUG oslo_vmware.api [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389641, 'name': PowerOnVM_Task, 'duration_secs': 0.683084} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.334433] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.334677] env[61972]: INFO nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Took 7.50 seconds to spawn the instance on the hypervisor. [ 1031.334861] env[61972]: DEBUG nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1031.335723] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c1d37a-428d-4723-a387-2d515743d96b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.521737] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.567026] env[61972]: DEBUG oslo_concurrency.lockutils [None req-01302867-bd1c-4b63-b837-1e81063022f0 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.255s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.567912] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.046s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.568122] env[61972]: DEBUG nova.compute.manager [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1031.569219] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e15981-e893-4f6e-9aee-d60e1b28ac66 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.575417] env[61972]: DEBUG nova.compute.manager [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61972) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 1031.575988] env[61972]: DEBUG nova.objects.instance [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.798796] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389643, 'name': ReconfigVM_Task, 'duration_secs': 0.32419} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.799065] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 63821242-c34e-4ed1-8ed3-f7f445ffe322/63821242-c34e-4ed1-8ed3-f7f445ffe322.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.799694] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72c6ddfd-8699-4465-baa1-1e8b1bbecb54 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.805724] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1031.805724] env[61972]: value = "task-1389644" [ 1031.805724] env[61972]: _type = "Task" [ 1031.805724] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.813929] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389644, 'name': Rename_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.815930] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.816161] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.816385] env[61972]: DEBUG nova.objects.instance [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lazy-loading 'resources' on Instance uuid 56e21cf4-4dbc-4f72-97c0-082dd689c046 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.853030] env[61972]: INFO nova.compute.manager [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Took 14.78 seconds to build instance. [ 1031.902850] env[61972]: DEBUG nova.compute.manager [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Stashing vm_state: active {{(pid=61972) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 1032.316668] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389644, 'name': Rename_Task, 'duration_secs': 0.142924} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.317021] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.317312] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f416fac1-fc29-4fd1-af41-b79df1f1c942 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.326343] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1032.326343] env[61972]: value = "task-1389645" [ 1032.326343] env[61972]: _type = "Task" [ 1032.326343] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.334130] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389645, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.355801] env[61972]: DEBUG oslo_concurrency.lockutils [None req-89faa76b-36f3-46a7-9d25-e32739e841d7 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "fe623f2c-1fd9-43f0-be96-29bb252e0171" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.285s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.420233] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a21f273-3573-4ddb-951d-a8804d6eb943 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.423507] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.428776] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bef4d1c-bafa-4a6c-92c9-f0ba55a6d760 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.460438] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d24597-1188-4488-a66c-b476d13fc18a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.471311] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c194c6be-8137-4922-9033-850822a6e9dd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.486911] env[61972]: DEBUG nova.compute.provider_tree [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.583194] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.583550] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86691650-617e-4ccb-b7a4-f1c8af6e955d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.590844] env[61972]: DEBUG oslo_vmware.api [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 
tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1032.590844] env[61972]: value = "task-1389646" [ 1032.590844] env[61972]: _type = "Task" [ 1032.590844] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.598983] env[61972]: DEBUG oslo_vmware.api [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.838129] env[61972]: DEBUG oslo_vmware.api [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389645, 'name': PowerOnVM_Task, 'duration_secs': 0.476636} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.838129] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1032.838129] env[61972]: INFO nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Took 6.80 seconds to spawn the instance on the hypervisor. 
[ 1032.838358] env[61972]: DEBUG nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1032.838974] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ef69f2-b1f8-4182-a896-af0f2a55af01 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.966427] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "fe623f2c-1fd9-43f0-be96-29bb252e0171" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.966928] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "fe623f2c-1fd9-43f0-be96-29bb252e0171" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.967294] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "fe623f2c-1fd9-43f0-be96-29bb252e0171-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.967648] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "fe623f2c-1fd9-43f0-be96-29bb252e0171-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.967962] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "fe623f2c-1fd9-43f0-be96-29bb252e0171-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.970969] env[61972]: INFO nova.compute.manager [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Terminating instance [ 1032.988623] env[61972]: DEBUG nova.scheduler.client.report [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1033.101661] env[61972]: DEBUG oslo_vmware.api [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389646, 'name': PowerOffVM_Task, 'duration_secs': 0.370944} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.101894] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.102112] env[61972]: DEBUG nova.compute.manager [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1033.102866] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefe0c5c-1b6f-4f04-962f-620e1acaac7a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.356941] env[61972]: INFO nova.compute.manager [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Took 12.50 seconds to build instance. [ 1033.475424] env[61972]: DEBUG nova.compute.manager [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1033.475657] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1033.476561] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a02eb5a-7f48-4493-87d4-b47794288bd2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.485054] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.485054] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b8188f2-e433-46f9-8061-ea8a967508dd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.490382] env[61972]: DEBUG oslo_vmware.api [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1033.490382] env[61972]: value = "task-1389647" [ 1033.490382] env[61972]: _type = "Task" [ 1033.490382] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.494351] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.496304] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.073s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.503910] env[61972]: DEBUG oslo_vmware.api [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389647, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.513664] env[61972]: INFO nova.scheduler.client.report [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Deleted allocations for instance 56e21cf4-4dbc-4f72-97c0-082dd689c046 [ 1033.614451] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ee64576f-691a-41e5-8b1e-aa508233e4c3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.858575] env[61972]: DEBUG oslo_concurrency.lockutils [None req-774e6ff7-10b9-48a3-8d9e-383259e0101b tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.013s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.928354] env[61972]: DEBUG nova.objects.instance [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.002393] env[61972]: INFO nova.compute.claims [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.005906] env[61972]: DEBUG oslo_vmware.api [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389647, 'name': PowerOffVM_Task, 'duration_secs': 0.254147} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.006615] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.006799] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1034.007061] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12122880-88fb-43e9-a9f5-66c899c9a56b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.020509] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ff6fc3fb-493e-405e-a68d-b7d554850f7c tempest-AttachVolumeShelveTestJSON-12225270 tempest-AttachVolumeShelveTestJSON-12225270-project-member] Lock "56e21cf4-4dbc-4f72-97c0-082dd689c046" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.923s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.067565] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.067802] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.067986] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleting the datastore file [datastore2] fe623f2c-1fd9-43f0-be96-29bb252e0171 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.068272] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22fcaff8-c91d-48a1-8f70-3358b60ce836 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.075012] env[61972]: DEBUG oslo_vmware.api [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1034.075012] env[61972]: value = "task-1389649" [ 1034.075012] env[61972]: _type = "Task" [ 1034.075012] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.083346] env[61972]: DEBUG oslo_vmware.api [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389649, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.254081] env[61972]: DEBUG nova.compute.manager [req-1787fdd5-e116-464e-83b8-136819d0e586 req-b90616dc-e43c-4006-a4e8-9a32c2cee1ca service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Received event network-changed-984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1034.254305] env[61972]: DEBUG nova.compute.manager [req-1787fdd5-e116-464e-83b8-136819d0e586 req-b90616dc-e43c-4006-a4e8-9a32c2cee1ca service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Refreshing instance network info cache due to event network-changed-984fa809-bf48-4083-bd47-872fafdec46a. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1034.254557] env[61972]: DEBUG oslo_concurrency.lockutils [req-1787fdd5-e116-464e-83b8-136819d0e586 req-b90616dc-e43c-4006-a4e8-9a32c2cee1ca service nova] Acquiring lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.254704] env[61972]: DEBUG oslo_concurrency.lockutils [req-1787fdd5-e116-464e-83b8-136819d0e586 req-b90616dc-e43c-4006-a4e8-9a32c2cee1ca service nova] Acquired lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.254865] env[61972]: DEBUG nova.network.neutron [req-1787fdd5-e116-464e-83b8-136819d0e586 req-b90616dc-e43c-4006-a4e8-9a32c2cee1ca service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Refreshing network info cache for port 984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.433180] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.433541] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.433579] env[61972]: DEBUG nova.network.neutron [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.433773] env[61972]: DEBUG nova.objects.instance [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] 
Lazy-loading 'info_cache' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.508161] env[61972]: INFO nova.compute.resource_tracker [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating resource usage from migration c3052a8b-3533-4c20-90f9-e0bb7e55a321 [ 1034.587130] env[61972]: DEBUG oslo_vmware.api [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146869} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.589298] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.589502] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.589681] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.589851] env[61972]: INFO nova.compute.manager [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1034.590218] env[61972]: DEBUG oslo.service.loopingcall [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.590609] env[61972]: DEBUG nova.compute.manager [-] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1034.590704] env[61972]: DEBUG nova.network.neutron [-] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1034.616437] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe3dbff-83df-4311-8bc5-26a63284f14e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.624945] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68fc82e-7668-45f3-806e-abcdb539c522 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.662510] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd2f688-69b6-4e4b-bfcc-989162c47848 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.670477] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d1a95d-4316-4380-a8c6-26be85c38043 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.684217] env[61972]: DEBUG nova.compute.provider_tree [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.937188] env[61972]: DEBUG nova.objects.base [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61972) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1034.993565] env[61972]: DEBUG nova.network.neutron [req-1787fdd5-e116-464e-83b8-136819d0e586 req-b90616dc-e43c-4006-a4e8-9a32c2cee1ca service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Updated VIF entry in instance network info cache for port 984fa809-bf48-4083-bd47-872fafdec46a. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.993983] env[61972]: DEBUG nova.network.neutron [req-1787fdd5-e116-464e-83b8-136819d0e586 req-b90616dc-e43c-4006-a4e8-9a32c2cee1ca service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Updating instance_info_cache with network_info: [{"id": "984fa809-bf48-4083-bd47-872fafdec46a", "address": "fa:16:3e:04:b4:67", "network": {"id": "c74365c9-d7d6-401a-a7fb-98f833ef744e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-480970775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "651d8f34661542219f5451bce866ec02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984fa809-bf", "ovs_interfaceid": "984fa809-bf48-4083-bd47-872fafdec46a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.130172] env[61972]: DEBUG nova.compute.manager [req-8a7d29e1-2466-439d-8178-7cd1b16dd743 req-3119c776-ddb9-42e4-99dd-e9893279be32 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Received event network-vif-deleted-bbcf3c01-de4f-46b2-af22-eb28c8a3bcde {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1035.130172] env[61972]: INFO nova.compute.manager [req-8a7d29e1-2466-439d-8178-7cd1b16dd743 req-3119c776-ddb9-42e4-99dd-e9893279be32 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Neutron deleted interface bbcf3c01-de4f-46b2-af22-eb28c8a3bcde; detaching it from the instance and deleting it from the info cache [ 1035.130330] env[61972]: DEBUG nova.network.neutron [req-8a7d29e1-2466-439d-8178-7cd1b16dd743 req-3119c776-ddb9-42e4-99dd-e9893279be32 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.189399] env[61972]: DEBUG nova.scheduler.client.report [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1035.496443] env[61972]: DEBUG oslo_concurrency.lockutils 
[req-1787fdd5-e116-464e-83b8-136819d0e586 req-b90616dc-e43c-4006-a4e8-9a32c2cee1ca service nova] Releasing lock "refresh_cache-63821242-c34e-4ed1-8ed3-f7f445ffe322" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.609850] env[61972]: DEBUG nova.network.neutron [-] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.632886] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c37e40ef-315a-48e1-b4ac-febf22379e17 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.644571] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0a3219-e8ba-4620-a9ee-b6790dacca1d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.661162] env[61972]: DEBUG nova.network.neutron [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating instance_info_cache with network_info: [{"id": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "address": "fa:16:3e:5b:7e:e2", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98807bc5-c5", "ovs_interfaceid": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.672121] env[61972]: DEBUG nova.compute.manager [req-8a7d29e1-2466-439d-8178-7cd1b16dd743 req-3119c776-ddb9-42e4-99dd-e9893279be32 service nova] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Detach interface failed, port_id=bbcf3c01-de4f-46b2-af22-eb28c8a3bcde, reason: Instance fe623f2c-1fd9-43f0-be96-29bb252e0171 could not be found. 
{{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1035.694833] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.198s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.695134] env[61972]: INFO nova.compute.manager [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Migrating [ 1036.114173] env[61972]: INFO nova.compute.manager [-] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Took 1.52 seconds to deallocate network for instance. [ 1036.164296] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Releasing lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.211598] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.211810] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.211993] env[61972]: DEBUG nova.network.neutron [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1036.620696] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.620982] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.621230] env[61972]: DEBUG nova.objects.instance [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lazy-loading 'resources' on Instance uuid fe623f2c-1fd9-43f0-be96-29bb252e0171 {{(pid=61972) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.019452] env[61972]: DEBUG nova.network.neutron [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance_info_cache with network_info: [{"id": "2cc52359-688b-48a0-8436-a1d5cfd37738", "address": "fa:16:3e:ed:78:92", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc52359-68", "ovs_interfaceid": "2cc52359-688b-48a0-8436-a1d5cfd37738", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.170705] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.173912] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a4a945d-2129-43f8-8e83-fbdabff2d1e5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.183692] env[61972]: DEBUG oslo_vmware.api [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1037.183692] env[61972]: value = "task-1389650" [ 1037.183692] env[61972]: _type = "Task" [ 1037.183692] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.195749] env[61972]: DEBUG oslo_vmware.api [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389650, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.224269] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad59cd7-4c84-4ac7-ab4b-7b961dae4dad {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.235195] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb5d891-c4d5-4c18-ab8b-a6393ba10a3f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.280883] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3555f9ff-ab15-437e-8b91-8b18f2517472 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.292050] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5049abe8-a199-4272-a5b6-47707e32e122 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.308854] env[61972]: DEBUG nova.compute.provider_tree [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.522389] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.694632] env[61972]: DEBUG oslo_vmware.api [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389650, 'name': PowerOnVM_Task, 'duration_secs': 0.4213} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.694975] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.695171] env[61972]: DEBUG nova.compute.manager [None req-ecb26b3a-32d9-48cd-9684-3b5e16930ff3 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1037.696127] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d21f48-a322-422c-b29e-7c3615d2496c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.813916] env[61972]: DEBUG nova.scheduler.client.report [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1038.318544] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.697s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.342622] env[61972]: INFO nova.scheduler.client.report [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted allocations for instance fe623f2c-1fd9-43f0-be96-29bb252e0171 [ 1038.851066] env[61972]: DEBUG oslo_concurrency.lockutils [None req-d9d2bf2c-a5c6-4124-b2f4-635979213193 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "fe623f2c-1fd9-43f0-be96-29bb252e0171" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.884s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.037575] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9044302b-faf1-4779-bbcc-20c5fafc8e3c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.056764] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance '66d0dc08-e8e7-4bf5-884a-67f65e8e109d' progress to 0 {{(pid=61972) 
_update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1039.562514] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.562844] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ff10a33-239e-4ecd-8f71-9b7e60d4531c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.571570] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1039.571570] env[61972]: value = "task-1389651" [ 1039.571570] env[61972]: _type = "Task" [ 1039.571570] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.580986] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389651, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.081909] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389651, 'name': PowerOffVM_Task, 'duration_secs': 0.196118} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.082261] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.082408] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance '66d0dc08-e8e7-4bf5-884a-67f65e8e109d' progress to 17 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1040.129554] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.129794] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.589481] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1040.589779] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1040.589965] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1040.590184] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor pref 0:0:0 {{(pid=61972) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1040.590365] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1040.590550] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1040.590794] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1040.591035] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1040.591231] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1040.591426] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1040.591628] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1040.597475] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccb2ec20-05fc-4098-88f6-d857774065d1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.616732] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1040.616732] env[61972]: value = "task-1389652" [ 1040.616732] env[61972]: _type = "Task" [ 1040.616732] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.626575] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389652, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.632182] env[61972]: DEBUG nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1041.127262] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389652, 'name': ReconfigVM_Task, 'duration_secs': 0.299098} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.127619] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance '66d0dc08-e8e7-4bf5-884a-67f65e8e109d' progress to 33 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1041.154685] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.154947] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.156379] env[61972]: INFO nova.compute.claims [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.634273] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1041.634555] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor limits 0:0:0 {{(pid=61972) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1041.634725] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.634933] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1041.635094] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.635250] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1041.635456] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1041.635617] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1041.635788] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1041.635956] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1041.636155] env[61972]: DEBUG nova.virt.hardware [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1041.641399] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1041.641850] env[61972]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ed0fa02-0f38-468f-923e-2d32b50c8d83 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.662563] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1041.662563] env[61972]: value = "task-1389653" [ 1041.662563] env[61972]: _type = "Task" [ 1041.662563] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.671101] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389653, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.176199] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389653, 'name': ReconfigVM_Task, 'duration_secs': 0.163644} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.176542] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1042.177317] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214e5d6c-0a71-490e-976b-2dcc027c9285 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.201463] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 66d0dc08-e8e7-4bf5-884a-67f65e8e109d/66d0dc08-e8e7-4bf5-884a-67f65e8e109d.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1042.203888] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a986c978-07d2-4eff-99cc-dadcbdfcf98e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.222615] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1042.222615] env[61972]: value = "task-1389654" [ 1042.222615] env[61972]: _type = "Task" [ 1042.222615] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.233927] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389654, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.276270] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d778a6-2756-40c9-8c42-9f6d0ea257cf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.284245] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bea8c2-ab35-4ceb-a0af-e855f80ceb0f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.313528] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39461d97-a4c1-4285-9e8f-c5b2caca73b9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.320992] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d100ff75-fe7c-476b-b688-50ffdb7ff734 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.334017] env[61972]: DEBUG nova.compute.provider_tree [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.733493] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389654, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.836949] env[61972]: DEBUG nova.scheduler.client.report [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1043.233988] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389654, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.342436] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.343081] env[61972]: DEBUG nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1043.734423] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389654, 'name': ReconfigVM_Task, 'duration_secs': 1.252068} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.734717] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 66d0dc08-e8e7-4bf5-884a-67f65e8e109d/66d0dc08-e8e7-4bf5-884a-67f65e8e109d.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.735022] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance '66d0dc08-e8e7-4bf5-884a-67f65e8e109d' progress to 50 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1043.847717] env[61972]: DEBUG nova.compute.utils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1043.849517] env[61972]: DEBUG nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1043.849693] env[61972]: DEBUG nova.network.neutron [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1043.897045] env[61972]: DEBUG nova.policy [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fa1cef9829b45f4bbe90e9882b8f8c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c57829399c5741c08c30bb60163148b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 1044.146848] env[61972]: DEBUG nova.network.neutron [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Successfully created port: eb788717-6160-4bbc-9baf-642b3580b386 {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1044.241524] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b16d1d2-9453-4b0c-b195-ba7f6c0d7dec {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.264293] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b17354c-0e0f-4cc8-8e70-6f6e4deef82a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.281974] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance '66d0dc08-e8e7-4bf5-884a-67f65e8e109d' progress to 67 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1044.353029] env[61972]: DEBUG nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Start building block device mappings for instance. 
{{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1044.830376] env[61972]: DEBUG nova.network.neutron [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Port 2cc52359-688b-48a0-8436-a1d5cfd37738 binding to destination host cpu-1 is already ACTIVE {{(pid=61972) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1045.362411] env[61972]: DEBUG nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1045.393818] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1045.394107] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1045.394270] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1045.394457] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1045.394608] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1045.394776] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1045.395051] env[61972]: DEBUG nova.virt.hardware [None 
req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1045.395225] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1045.395398] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1045.395561] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1045.395733] env[61972]: DEBUG nova.virt.hardware [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1045.396606] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ad8eba-e2ca-4d8e-ae0d-79970f8c269d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.406517] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961b089c-7a98-4682-b969-8bec1f1c2f96 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.574150] env[61972]: DEBUG nova.compute.manager [req-46c3528a-e83f-42ad-bca7-041d21c6e340 req-d61aae60-5919-475d-bcc0-f9d024640805 service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Received event network-vif-plugged-eb788717-6160-4bbc-9baf-642b3580b386 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1045.574150] env[61972]: DEBUG oslo_concurrency.lockutils [req-46c3528a-e83f-42ad-bca7-041d21c6e340 req-d61aae60-5919-475d-bcc0-f9d024640805 service nova] Acquiring lock "65da0898-7f48-4ebf-9627-ba4e2ac68447-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.574150] env[61972]: DEBUG oslo_concurrency.lockutils [req-46c3528a-e83f-42ad-bca7-041d21c6e340 req-d61aae60-5919-475d-bcc0-f9d024640805 service nova] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.574150] env[61972]: DEBUG oslo_concurrency.lockutils [req-46c3528a-e83f-42ad-bca7-041d21c6e340 req-d61aae60-5919-475d-bcc0-f9d024640805 service nova] Lock 
"65da0898-7f48-4ebf-9627-ba4e2ac68447-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.574437] env[61972]: DEBUG nova.compute.manager [req-46c3528a-e83f-42ad-bca7-041d21c6e340 req-d61aae60-5919-475d-bcc0-f9d024640805 service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] No waiting events found dispatching network-vif-plugged-eb788717-6160-4bbc-9baf-642b3580b386 {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1045.574957] env[61972]: WARNING nova.compute.manager [req-46c3528a-e83f-42ad-bca7-041d21c6e340 req-d61aae60-5919-475d-bcc0-f9d024640805 service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Received unexpected event network-vif-plugged-eb788717-6160-4bbc-9baf-642b3580b386 for instance with vm_state building and task_state spawning. [ 1045.655204] env[61972]: DEBUG nova.network.neutron [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Successfully updated port: eb788717-6160-4bbc-9baf-642b3580b386 {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1045.854106] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.854347] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.854525] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.158673] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "refresh_cache-65da0898-7f48-4ebf-9627-ba4e2ac68447" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.158853] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "refresh_cache-65da0898-7f48-4ebf-9627-ba4e2ac68447" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.158930] env[61972]: DEBUG nova.network.neutron [None 
req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.689409] env[61972]: DEBUG nova.network.neutron [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Instance cache missing network info. {{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1046.808427] env[61972]: DEBUG nova.network.neutron [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Updating instance_info_cache with network_info: [{"id": "eb788717-6160-4bbc-9baf-642b3580b386", "address": "fa:16:3e:16:8d:2d", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb788717-61", "ovs_interfaceid": "eb788717-6160-4bbc-9baf-642b3580b386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.885752] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.885945] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.886143] env[61972]: DEBUG nova.network.neutron [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1047.311232] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock 
"refresh_cache-65da0898-7f48-4ebf-9627-ba4e2ac68447" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.311528] env[61972]: DEBUG nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Instance network_info: |[{"id": "eb788717-6160-4bbc-9baf-642b3580b386", "address": "fa:16:3e:16:8d:2d", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb788717-61", "ovs_interfaceid": "eb788717-6160-4bbc-9baf-642b3580b386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1047.311991] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:8d:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb788717-6160-4bbc-9baf-642b3580b386', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.319452] env[61972]: DEBUG oslo.service.loopingcall [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1047.319670] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.319903] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1ac9c00-790b-4677-9038-6231ca0aa37a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.341045] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.341045] env[61972]: value = "task-1389655" [ 1047.341045] env[61972]: _type = "Task" [ 1047.341045] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.349519] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389655, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.605355] env[61972]: DEBUG nova.compute.manager [req-e4ff9a73-1909-453e-8735-2924cfaccb32 req-05d0654b-86d1-4033-a0c2-70e883927490 service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Received event network-changed-eb788717-6160-4bbc-9baf-642b3580b386 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1047.605603] env[61972]: DEBUG nova.compute.manager [req-e4ff9a73-1909-453e-8735-2924cfaccb32 req-05d0654b-86d1-4033-a0c2-70e883927490 service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Refreshing instance network info cache due to event network-changed-eb788717-6160-4bbc-9baf-642b3580b386. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1047.605704] env[61972]: DEBUG oslo_concurrency.lockutils [req-e4ff9a73-1909-453e-8735-2924cfaccb32 req-05d0654b-86d1-4033-a0c2-70e883927490 service nova] Acquiring lock "refresh_cache-65da0898-7f48-4ebf-9627-ba4e2ac68447" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.605852] env[61972]: DEBUG oslo_concurrency.lockutils [req-e4ff9a73-1909-453e-8735-2924cfaccb32 req-05d0654b-86d1-4033-a0c2-70e883927490 service nova] Acquired lock "refresh_cache-65da0898-7f48-4ebf-9627-ba4e2ac68447" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.606057] env[61972]: DEBUG nova.network.neutron [req-e4ff9a73-1909-453e-8735-2924cfaccb32 req-05d0654b-86d1-4033-a0c2-70e883927490 service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Refreshing network info cache for port eb788717-6160-4bbc-9baf-642b3580b386 {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.629045] env[61972]: DEBUG nova.network.neutron [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance_info_cache with network_info: [{"id": "2cc52359-688b-48a0-8436-a1d5cfd37738", "address": "fa:16:3e:ed:78:92", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc52359-68", "ovs_interfaceid": "2cc52359-688b-48a0-8436-a1d5cfd37738", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.719017] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.719338] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.719457] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1047.851126] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389655, 'name': CreateVM_Task, 'duration_secs': 0.316528} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.851265] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1047.851930] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.852125] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.852460] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1047.852708] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1c522fd-0890-4d08-9a95-ff92e2f07117 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.857105] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1047.857105] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d17f56-1b16-dcbb-486b-807dc7bad5f9" [ 1047.857105] env[61972]: _type = "Task" [ 1047.857105] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.864651] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d17f56-1b16-dcbb-486b-807dc7bad5f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.131770] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.338769] env[61972]: DEBUG nova.network.neutron [req-e4ff9a73-1909-453e-8735-2924cfaccb32 req-05d0654b-86d1-4033-a0c2-70e883927490 service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Updated VIF entry in instance network info cache for port eb788717-6160-4bbc-9baf-642b3580b386. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1048.339158] env[61972]: DEBUG nova.network.neutron [req-e4ff9a73-1909-453e-8735-2924cfaccb32 req-05d0654b-86d1-4033-a0c2-70e883927490 service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Updating instance_info_cache with network_info: [{"id": "eb788717-6160-4bbc-9baf-642b3580b386", "address": "fa:16:3e:16:8d:2d", "network": {"id": "0ee63c2f-6734-4b12-88f2-59679c697d2d", "bridge": "br-int", "label": "tempest-ServersTestJSON-473222861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c57829399c5741c08c30bb60163148b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb788717-61", "ovs_interfaceid": "eb788717-6160-4bbc-9baf-642b3580b386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.367498] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d17f56-1b16-dcbb-486b-807dc7bad5f9, 'name': SearchDatastore_Task, 'duration_secs': 0.0108} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.367784] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.368089] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1048.368350] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.368503] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.368684] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1048.368936] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93fb39e9-b7fe-4c93-b332-93136635182b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.377790] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1048.377961] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1048.378670] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49179355-b7fe-4c97-9f7a-45751bc9b5c5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.384057] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1048.384057] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]526721ac-a02b-d7c5-ee04-22609f1e4e19" [ 1048.384057] env[61972]: _type = "Task" [ 1048.384057] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.391900] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]526721ac-a02b-d7c5-ee04-22609f1e4e19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.656296] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5df6d9d-c598-4453-bb34-0903da2913c3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.675452] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9971cd-68a5-4666-b7f6-86f1ef728fa1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.682436] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance '66d0dc08-e8e7-4bf5-884a-67f65e8e109d' progress to 83 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1048.841864] env[61972]: DEBUG oslo_concurrency.lockutils [req-e4ff9a73-1909-453e-8735-2924cfaccb32 req-05d0654b-86d1-4033-a0c2-70e883927490 service nova] Releasing lock "refresh_cache-65da0898-7f48-4ebf-9627-ba4e2ac68447" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.896523] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]526721ac-a02b-d7c5-ee04-22609f1e4e19, 'name': SearchDatastore_Task, 'duration_secs': 0.009309} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.897259] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f66bd73-2d9f-41c9-b9f9-c431370eb985 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.902269] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1048.902269] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52699709-8211-c610-120a-f479e6991ce6" [ 1048.902269] env[61972]: _type = "Task" [ 1048.902269] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.909571] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52699709-8211-c610-120a-f479e6991ce6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.188700] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.189059] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3b1baad-f74c-476a-9102-9e6de5c7de92 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.197791] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1049.197791] env[61972]: value = "task-1389656" [ 1049.197791] env[61972]: _type = "Task" [ 1049.197791] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.205693] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389656, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.414539] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52699709-8211-c610-120a-f479e6991ce6, 'name': SearchDatastore_Task, 'duration_secs': 0.009501} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.414853] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.415177] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 65da0898-7f48-4ebf-9627-ba4e2ac68447/65da0898-7f48-4ebf-9627-ba4e2ac68447.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1049.415463] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ce746f7-cbbc-468e-a82c-6c4cb3a26d39 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.422930] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1049.422930] env[61972]: value = "task-1389657" [ 1049.422930] env[61972]: _type = "Task" [ 1049.422930] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.431109] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389657, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.708153] env[61972]: DEBUG oslo_vmware.api [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389656, 'name': PowerOnVM_Task, 'duration_secs': 0.371698} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.708681] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.708800] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0c2e63-014c-418c-b250-e4a8c9074770 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance '66d0dc08-e8e7-4bf5-884a-67f65e8e109d' progress to 100 {{(pid=61972) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1049.935684] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478657} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.936019] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore2] 65da0898-7f48-4ebf-9627-ba4e2ac68447/65da0898-7f48-4ebf-9627-ba4e2ac68447.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1049.936231] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.936523] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc0dfe35-0c72-4ddb-bfb5-7dfff3193dce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.944541] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1049.944541] env[61972]: value = "task-1389658" [ 1049.944541] env[61972]: _type = "Task" [ 1049.944541] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.952804] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389658, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.232986] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Didn't find any instances for network info cache update. 
{{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 1050.233155] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.233323] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.233472] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.233623] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.233766] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.233957] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.234087] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1050.234240] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.454613] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389658, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059016} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.455395] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.455690] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a0031a-72ef-48d5-9f6a-361d7e43ce35 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.477691] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 65da0898-7f48-4ebf-9627-ba4e2ac68447/65da0898-7f48-4ebf-9627-ba4e2ac68447.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.477691] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45f2466d-0c75-4e95-8ac0-2b1787e54c1a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.499323] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1050.499323] env[61972]: value = "task-1389659" [ 1050.499323] env[61972]: _type = "Task" [ 1050.499323] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.507821] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389659, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.738517] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.738517] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.738517] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.738517] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1050.738776] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f03f6b7-b5b1-4585-b2b8-b04e24b281a7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.747387] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ac6dad-1496-44e6-b9cd-e5fa635b1ff4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.763149] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdeb81fe-5ad2-45fd-ab7a-2d87d8aa1454 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.770583] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb86f8f-3cf3-4672-81d4-e88192321b0d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.799269] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180187MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1050.799402] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.799585] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.010219] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389659, 'name': ReconfigVM_Task, 'duration_secs': 0.281806} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.010514] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 65da0898-7f48-4ebf-9627-ba4e2ac68447/65da0898-7f48-4ebf-9627-ba4e2ac68447.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.011107] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb25aea1-f470-4b6d-a881-b7ca9b2ef837 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.018338] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1051.018338] env[61972]: value = "task-1389660" [ 1051.018338] env[61972]: _type = "Task" [ 1051.018338] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.030714] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389660, 'name': Rename_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.526484] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.527896] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.527896] env[61972]: DEBUG nova.compute.manager [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Going to confirm migration 3 {{(pid=61972) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 1051.531307] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389660, 'name': Rename_Task, 'duration_secs': 0.145591} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.531754] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1051.532008] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f428691e-daae-43a8-81e6-38da334798c1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.540213] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1051.540213] env[61972]: value = "task-1389661" [ 1051.540213] env[61972]: _type = "Task" [ 1051.540213] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.549364] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389661, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.807983] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Applying migration context for instance 66d0dc08-e8e7-4bf5-884a-67f65e8e109d as it has an incoming, in-progress migration c3052a8b-3533-4c20-90f9-e0bb7e55a321. 
Migration status is finished {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1051.808892] env[61972]: INFO nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating resource usage from migration c3052a8b-3533-4c20-90f9-e0bb7e55a321 [ 1051.830448] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 72435dc4-eae1-4606-bb32-e7e8e282d0b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.830448] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance d2864436-05a3-421f-98fd-41df925727c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.830448] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 63821242-c34e-4ed1-8ed3-f7f445ffe322 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.830448] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Migration c3052a8b-3533-4c20-90f9-e0bb7e55a321 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1051.830656] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 66d0dc08-e8e7-4bf5-884a-67f65e8e109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.830656] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 65da0898-7f48-4ebf-9627-ba4e2ac68447 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.830656] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1051.830656] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1051.905798] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bfe767-a51e-4d72-95db-b0ddef84beb1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.913903] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f32bcc-49c8-49be-8b68-5899173c7dbe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.943859] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd85f757-1fb9-497f-acf2-43606baec112 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.952098] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c05b6a-2878-466c-995a-2f0a098ecbcc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.966362] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.050600] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389661, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.066068] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.066254] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquired lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.066461] env[61972]: DEBUG nova.network.neutron [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1052.066651] env[61972]: DEBUG nova.objects.instance [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'info_cache' on Instance uuid 66d0dc08-e8e7-4bf5-884a-67f65e8e109d {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.469578] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1052.552699] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389661, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.974731] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1052.975033] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.175s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.051458] env[61972]: DEBUG oslo_vmware.api [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389661, 'name': PowerOnVM_Task, 'duration_secs': 1.046217} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.051871] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.051986] env[61972]: INFO nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Took 7.69 seconds to spawn the instance on the hypervisor. [ 1053.052132] env[61972]: DEBUG nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1053.052879] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3424b0-2fa8-4e7a-b7b0-38b850212fbc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.263961] env[61972]: DEBUG nova.network.neutron [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance_info_cache with network_info: [{"id": "2cc52359-688b-48a0-8436-a1d5cfd37738", "address": "fa:16:3e:ed:78:92", "network": {"id": "72713f04-7c7d-417c-b22a-c6d6c7cd9651", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093561550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3c052a272742808be2bcdc71d8f62f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e99c063c-0cb7-4db6-b077-114166cfe889", "external-id": "nsx-vlan-transportzone-462", "segmentation_id": 462, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cc52359-68", "ovs_interfaceid": "2cc52359-688b-48a0-8436-a1d5cfd37738", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.568655] env[61972]: INFO nova.compute.manager [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Took 12.43 seconds to build instance. 
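The stretch of entries ending with "Took 12.43 seconds to build instance" traces one spawn through the vmwareapi driver: CreateVM_Task, SearchDatastore_Task, MakeDirectory, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and finally PowerOnVM_Task, each followed by the "progress is N% ... completed successfully" polling lines. All of these follow the same oslo.vmware invoke-and-poll pattern; the sketch below is illustrative only (it is not the Nova code itself) and assumes an existing VMwareAPISession named session and a VM managed-object reference vm_ref.

    # Illustrative sketch of the invoke-and-poll pattern behind the
    # CreateVM_Task / CopyVirtualDisk_Task / PowerOnVM_Task entries above.
    # `session` and `vm_ref` are assumed to already exist; placeholders only.
    from oslo_vmware import api as vmware_api


    def power_on_vm(session: "vmware_api.VMwareAPISession", vm_ref):
        # invoke_api() issues the SOAP call and returns a Task reference
        # immediately; this corresponds to the "Invoking
        # VirtualMachine.PowerOnVM_Task" lines in the log.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # wait_for_task() polls the task (the "progress is 0% ... 100%"
        # lines) and returns the task info once vCenter reports success,
        # raising if the task ends in an error state.
        return session.wait_for_task(task)

The 12.43-second build time reported above is simply the sum of these polled task durations plus the network allocation and image-cache handling that interleave with them in the log.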
[ 1053.767194] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Releasing lock "refresh_cache-66d0dc08-e8e7-4bf5-884a-67f65e8e109d" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.767194] env[61972]: DEBUG nova.objects.instance [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lazy-loading 'migration_context' on Instance uuid 66d0dc08-e8e7-4bf5-884a-67f65e8e109d {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.070421] env[61972]: DEBUG oslo_concurrency.lockutils [None req-2277c41d-70a8-4a0f-8531-fda2de6133f1 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.940s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.269094] env[61972]: DEBUG nova.objects.base [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Object Instance<66d0dc08-e8e7-4bf5-884a-67f65e8e109d> lazy-loaded attributes: info_cache,migration_context {{(pid=61972) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1054.270084] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24581713-a5bc-48da-b956-6f70c20edcd3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.289808] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfe4181f-feb6-40f3-a86e-9365e350b0fb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.297022] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1054.297022] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fae9b7-49cd-9253-8522-0511b67cce4d" [ 1054.297022] env[61972]: _type = "Task" [ 1054.297022] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.304854] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fae9b7-49cd-9253-8522-0511b67cce4d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.566798] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.567026] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.567266] env[61972]: DEBUG nova.compute.manager [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1054.568163] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800e8845-3150-48bc-9c23-393882ab972f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.574877] env[61972]: DEBUG nova.compute.manager [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61972) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 1054.575417] env[61972]: DEBUG nova.objects.instance [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lazy-loading 'flavor' on Instance uuid 65da0898-7f48-4ebf-9627-ba4e2ac68447 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.807321] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52fae9b7-49cd-9253-8522-0511b67cce4d, 'name': SearchDatastore_Task, 'duration_secs': 0.010065} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.807616] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.807851] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.391011] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467c6d9c-164d-424c-9add-a6b9b3c8b5af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.398999] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb606fca-b609-4988-aed8-d5f6dd2a7d07 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.428692] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb04f3b9-871d-4975-960c-16705f88bb68 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.436020] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2bcf78-b364-4399-a139-9a80031d7876 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.449073] env[61972]: DEBUG nova.compute.provider_tree [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.582531] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.582798] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6356e04-82d2-45ad-bf83-77a0a0ff8bfa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.591548] env[61972]: DEBUG oslo_vmware.api [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1055.591548] env[61972]: value = "task-1389662" [ 1055.591548] env[61972]: _type = "Task" [ 1055.591548] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.599813] env[61972]: DEBUG oslo_vmware.api [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389662, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.952170] env[61972]: DEBUG nova.scheduler.client.report [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1056.102107] env[61972]: DEBUG oslo_vmware.api [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389662, 'name': PowerOffVM_Task, 'duration_secs': 0.183468} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.102407] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.102615] env[61972]: DEBUG nova.compute.manager [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1056.103372] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e84f302-b75f-4eaf-bab1-01dfbc536adc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.616339] env[61972]: DEBUG oslo_concurrency.lockutils [None req-8a5cc2e2-ab66-46fe-ae0d-7abae1f34fe2 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.962367] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.154s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.165061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 
tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.165061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.165061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "65da0898-7f48-4ebf-9627-ba4e2ac68447-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.165311] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.165343] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.167570] env[61972]: INFO nova.compute.manager [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Terminating instance [ 1057.520580] env[61972]: INFO nova.scheduler.client.report [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted allocation for migration c3052a8b-3533-4c20-90f9-e0bb7e55a321 [ 1057.671327] env[61972]: DEBUG nova.compute.manager [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Start destroying the instance on the hypervisor. 
{{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1057.671726] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.672593] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b384365f-a8f8-48d0-b716-96ef2c82324a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.680554] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.680834] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58aa7e54-4378-418d-95fa-80528ca46042 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.754338] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.754546] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.754813] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleting the datastore file [datastore2] 65da0898-7f48-4ebf-9627-ba4e2ac68447 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.754976] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76143f27-907a-4a36-a5ec-486319248099 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.761882] env[61972]: DEBUG oslo_vmware.api [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1057.761882] env[61972]: value = "task-1389664" [ 1057.761882] env[61972]: _type = "Task" [ 1057.761882] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.769388] env[61972]: DEBUG oslo_vmware.api [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389664, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.027193] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.500s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.271456] env[61972]: DEBUG oslo_vmware.api [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136949} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.271711] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1058.271903] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1058.272100] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1058.272285] env[61972]: INFO nova.compute.manager [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1058.272527] env[61972]: DEBUG oslo.service.loopingcall [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1058.272719] env[61972]: DEBUG nova.compute.manager [-] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1058.272817] env[61972]: DEBUG nova.network.neutron [-] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.514572] env[61972]: DEBUG nova.compute.manager [req-f14b40d1-5920-4060-b844-a15598d50b1b req-cdf842ba-cfa0-459d-a070-05ea2383950c service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Received event network-vif-deleted-eb788717-6160-4bbc-9baf-642b3580b386 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1058.514805] env[61972]: INFO nova.compute.manager [req-f14b40d1-5920-4060-b844-a15598d50b1b req-cdf842ba-cfa0-459d-a070-05ea2383950c service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Neutron deleted interface eb788717-6160-4bbc-9baf-642b3580b386; detaching it from the instance and deleting it from the info cache [ 1058.515008] env[61972]: DEBUG nova.network.neutron [req-f14b40d1-5920-4060-b844-a15598d50b1b req-cdf842ba-cfa0-459d-a070-05ea2383950c service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.981520] env[61972]: DEBUG nova.network.neutron [-] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.017883] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27736d54-4a74-4e62-837f-27279a66323a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.027549] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf8fc98-05ec-48e3-98c9-744c73860400 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.053637] env[61972]: DEBUG nova.compute.manager [req-f14b40d1-5920-4060-b844-a15598d50b1b req-cdf842ba-cfa0-459d-a070-05ea2383950c service nova] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Detach interface failed, port_id=eb788717-6160-4bbc-9baf-642b3580b386, reason: Instance 65da0898-7f48-4ebf-9627-ba4e2ac68447 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1059.484041] env[61972]: INFO nova.compute.manager [-] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Took 1.21 seconds to deallocate network for instance. 
[ 1059.553740] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.554009] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.554266] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.554461] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.554637] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.557016] env[61972]: INFO nova.compute.manager [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Terminating instance [ 1059.989538] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.989822] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.990053] env[61972]: DEBUG nova.objects.instance [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 
tempest-ServersTestJSON-1214410209-project-member] Lazy-loading 'resources' on Instance uuid 65da0898-7f48-4ebf-9627-ba4e2ac68447 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.060486] env[61972]: DEBUG nova.compute.manager [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1060.060731] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1060.061643] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fed910-f882-440e-9ed2-22fd40ccbf56 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.070059] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.070059] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea5493b1-7e49-4a76-8825-0c1b32e0925b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.077030] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1060.077030] env[61972]: value = "task-1389665" [ 1060.077030] env[61972]: _type = "Task" [ 1060.077030] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.084783] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389665, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.566596] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de1a5fa-d9b6-4d7a-8603-065909de4e99 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.575551] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ac25b4-2312-415e-8a4b-46f7831e0d96 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.586613] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389665, 'name': PowerOffVM_Task, 'duration_secs': 0.167517} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.610492] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.610692] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1060.611144] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c525dc54-c7f2-45d1-bd2a-4c697102f64a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.613149] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3175bb-110d-47f8-bd00-5c5265b412d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.621013] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac73f78-4786-4cc8-924f-1485afb37b73 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.634012] env[61972]: DEBUG nova.compute.provider_tree [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.685903] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1060.686143] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1060.686367] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleting the datastore file [datastore2] 66d0dc08-e8e7-4bf5-884a-67f65e8e109d {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1060.686676] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2681e84c-c8a7-4e83-8e62-0990037692ee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.693294] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 
tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for the task: (returnval){ [ 1060.693294] env[61972]: value = "task-1389667" [ 1060.693294] env[61972]: _type = "Task" [ 1060.693294] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.701383] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389667, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.138044] env[61972]: DEBUG nova.scheduler.client.report [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1061.202537] env[61972]: DEBUG oslo_vmware.api [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Task: {'id': task-1389667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134226} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.202727] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.202917] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1061.203117] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1061.203297] env[61972]: INFO nova.compute.manager [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1061.203531] env[61972]: DEBUG oslo.service.loopingcall [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1061.203719] env[61972]: DEBUG nova.compute.manager [-] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1061.203813] env[61972]: DEBUG nova.network.neutron [-] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1061.461492] env[61972]: DEBUG nova.compute.manager [req-efcbb4f7-ee0f-4134-8157-aa079d1b4dab req-a3b737d4-f87d-4715-b188-cfdfb5c82ef5 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Received event network-vif-deleted-2cc52359-688b-48a0-8436-a1d5cfd37738 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1061.461644] env[61972]: INFO nova.compute.manager [req-efcbb4f7-ee0f-4134-8157-aa079d1b4dab req-a3b737d4-f87d-4715-b188-cfdfb5c82ef5 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Neutron deleted interface 2cc52359-688b-48a0-8436-a1d5cfd37738; detaching it from the instance and deleting it from the info cache [ 1061.461821] env[61972]: DEBUG nova.network.neutron [req-efcbb4f7-ee0f-4134-8157-aa079d1b4dab req-a3b737d4-f87d-4715-b188-cfdfb5c82ef5 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.642434] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.664313] env[61972]: INFO nova.scheduler.client.report [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted allocations for instance 65da0898-7f48-4ebf-9627-ba4e2ac68447 [ 1061.942834] env[61972]: DEBUG nova.network.neutron [-] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.964211] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-350f9fa2-0f83-468d-89ea-669f556d9f4c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.974064] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92b3440-6f61-41a8-8648-0a107edd3d30 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.999509] env[61972]: DEBUG nova.compute.manager [req-efcbb4f7-ee0f-4134-8157-aa079d1b4dab req-a3b737d4-f87d-4715-b188-cfdfb5c82ef5 service nova] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Detach interface failed, port_id=2cc52359-688b-48a0-8436-a1d5cfd37738, reason: Instance 66d0dc08-e8e7-4bf5-884a-67f65e8e109d could not be found. 
{{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1062.171971] env[61972]: DEBUG oslo_concurrency.lockutils [None req-1cbd7d2b-0a14-4d77-93a2-8bfd8d93ee73 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "65da0898-7f48-4ebf-9627-ba4e2ac68447" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.007s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.445824] env[61972]: INFO nova.compute.manager [-] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Took 1.24 seconds to deallocate network for instance. [ 1062.891819] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.892115] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.892337] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.892531] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.892709] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.895148] env[61972]: INFO nova.compute.manager [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Terminating instance [ 1062.953716] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.954013] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.954225] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.971753] env[61972]: INFO nova.scheduler.client.report [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Deleted allocations for instance 66d0dc08-e8e7-4bf5-884a-67f65e8e109d [ 1063.399216] env[61972]: DEBUG nova.compute.manager [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1063.399596] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.400365] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46801cf8-f031-4de8-935d-02b1b14097af {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.408223] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.408467] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-250d194c-d17e-4c06-833c-3b634ef634c0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.414414] env[61972]: DEBUG oslo_vmware.api [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1063.414414] env[61972]: value = "task-1389668" [ 1063.414414] env[61972]: _type = "Task" [ 1063.414414] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.423173] env[61972]: DEBUG oslo_vmware.api [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389668, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.480244] env[61972]: DEBUG oslo_concurrency.lockutils [None req-0abab4f3-0aaa-4c2f-928d-e2172b0cb0b0 tempest-DeleteServersTestJSON-1344274745 tempest-DeleteServersTestJSON-1344274745-project-member] Lock "66d0dc08-e8e7-4bf5-884a-67f65e8e109d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.926s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.924015] env[61972]: DEBUG oslo_vmware.api [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389668, 'name': PowerOffVM_Task, 'duration_secs': 0.19931} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.924350] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1063.924537] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1063.924800] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2c8a06b-3940-46d2-9934-0c5f2f5e924f {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.987186] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1063.987420] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1063.987707] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleting the datastore file [datastore1] 72435dc4-eae1-4606-bb32-e7e8e282d0b9 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1063.988029] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-439cbe66-672c-47f4-9003-813e0a02db9a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.994435] env[61972]: DEBUG oslo_vmware.api [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for the task: (returnval){ [ 1063.994435] env[61972]: value = "task-1389671" [ 1063.994435] env[61972]: _type = 
"Task" [ 1063.994435] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.002885] env[61972]: DEBUG oslo_vmware.api [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389671, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.504182] env[61972]: DEBUG oslo_vmware.api [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Task: {'id': task-1389671, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149868} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.504576] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.504707] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1064.504905] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1064.505100] env[61972]: INFO nova.compute.manager [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1064.505369] env[61972]: DEBUG oslo.service.loopingcall [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.505587] env[61972]: DEBUG nova.compute.manager [-] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1064.505683] env[61972]: DEBUG nova.network.neutron [-] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1064.741406] env[61972]: DEBUG nova.compute.manager [req-4198d8d7-9cfb-46c0-aaa8-c40554569db8 req-475f231a-5f6a-4f38-aafc-6ca447bf62df service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Received event network-vif-deleted-3583e7ca-03b2-4200-8a2a-9394e6cec912 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1064.741669] env[61972]: INFO nova.compute.manager [req-4198d8d7-9cfb-46c0-aaa8-c40554569db8 req-475f231a-5f6a-4f38-aafc-6ca447bf62df service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Neutron deleted interface 3583e7ca-03b2-4200-8a2a-9394e6cec912; detaching it from the instance and deleting it from the info cache [ 1064.741857] env[61972]: DEBUG nova.network.neutron [req-4198d8d7-9cfb-46c0-aaa8-c40554569db8 req-475f231a-5f6a-4f38-aafc-6ca447bf62df service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.223202] env[61972]: DEBUG nova.network.neutron [-] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.245042] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f3332eb-b83f-4482-a955-7db7a83c26bd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.254701] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4a8041-eca3-4104-beaa-efc065083a82 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.278327] env[61972]: DEBUG nova.compute.manager [req-4198d8d7-9cfb-46c0-aaa8-c40554569db8 req-475f231a-5f6a-4f38-aafc-6ca447bf62df service nova] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Detach interface failed, port_id=3583e7ca-03b2-4200-8a2a-9394e6cec912, reason: Instance 72435dc4-eae1-4606-bb32-e7e8e282d0b9 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1065.726199] env[61972]: INFO nova.compute.manager [-] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Took 1.22 seconds to deallocate network for instance. 
[ 1066.232461] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.232751] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.232978] env[61972]: DEBUG nova.objects.instance [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lazy-loading 'resources' on Instance uuid 72435dc4-eae1-4606-bb32-e7e8e282d0b9 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.786049] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94424750-d51c-4b91-a848-b8cf7614ebe2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.793448] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc11814-cc1e-4527-b008-a5a77ed314a7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.825482] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f1f187-37ba-4327-b52f-aea74bfdee7e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.832243] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c353d8-f445-4a89-b4ba-f431557d0cee {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.844845] env[61972]: DEBUG nova.compute.provider_tree [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.348276] env[61972]: DEBUG nova.scheduler.client.report [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1067.853206] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.878491] env[61972]: INFO nova.scheduler.client.report [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Deleted allocations for instance 72435dc4-eae1-4606-bb32-e7e8e282d0b9 [ 1068.389884] env[61972]: DEBUG oslo_concurrency.lockutils [None req-b1b7bc4a-a67a-43b5-abc2-18faeb790521 tempest-ServersTestJSON-1214410209 tempest-ServersTestJSON-1214410209-project-member] Lock "72435dc4-eae1-4606-bb32-e7e8e282d0b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.497s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.621514] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.621514] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1070.621514] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Rebuilding the list of instances to heal {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 1071.159992] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.160181] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquired lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.160335] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: d2864436-05a3-421f-98fd-41df925727c6] Forcefully refreshing network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1071.160506] env[61972]: DEBUG nova.objects.instance [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lazy-loading 'info_cache' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.417206] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.417473] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock 
"63821242-c34e-4ed1-8ed3-f7f445ffe322" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.883186] env[61972]: DEBUG nova.network.neutron [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating instance_info_cache with network_info: [{"id": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "address": "fa:16:3e:5b:7e:e2", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98807bc5-c5", "ovs_interfaceid": "98807bc5-c5af-4bd9-ad5e-8c3043878d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.920049] env[61972]: DEBUG nova.compute.utils [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1073.385640] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Releasing lock "refresh_cache-d2864436-05a3-421f-98fd-41df925727c6" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.385887] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updated the network info_cache for instance {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 1073.386138] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1073.386312] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1073.422231] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" 
"released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.889824] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.889824] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.889824] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.889824] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1073.890589] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076516de-245b-498c-ba80-462368525497 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.898587] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441b7821-a080-4bcc-a2d8-651152114283 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.911543] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d4c7ba-b6a1-4c60-878e-64e97dbea603 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.917380] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a504c34-e8de-4b3e-9227-a4d403bff4d0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.951389] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180187MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1073.951554] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.951757] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.188441] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.188710] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.188926] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "d2864436-05a3-421f-98fd-41df925727c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.189128] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.189308] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.191427] env[61972]: INFO nova.compute.manager [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Terminating instance [ 1074.488512] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.488932] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s 
{{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.489252] env[61972]: INFO nova.compute.manager [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Attaching volume 4b08d8c7-ed8d-438e-af5f-934e47d03d2a to /dev/sdb [ 1074.520147] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b59145d-00f2-49e5-b23f-bc5be3210f32 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.527298] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1a215c-2046-46fa-813e-76d984d8e424 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.540110] env[61972]: DEBUG nova.virt.block_device [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Updating existing volume attachment record: 59968228-462d-460b-8896-acc67f6bba5d {{(pid=61972) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1074.695586] env[61972]: DEBUG nova.compute.manager [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1074.695891] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1074.696791] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2641078-dcdb-4220-884e-629cc67db596 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.703777] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.704029] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61ae09c5-a481-4c72-8829-eb2f3ae1baed {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.709363] env[61972]: DEBUG oslo_vmware.api [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1074.709363] env[61972]: value = "task-1389673" [ 1074.709363] env[61972]: _type = "Task" [ 1074.709363] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.717877] env[61972]: DEBUG oslo_vmware.api [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389673, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.977617] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance d2864436-05a3-421f-98fd-41df925727c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.977789] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 63821242-c34e-4ed1-8ed3-f7f445ffe322 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.977968] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1074.978125] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1075.018813] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc880ef0-caf4-4a7b-baf2-4c34b8a10869 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.026526] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af3a7fc-5876-48e4-8416-a66824d723d1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.056700] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b42df9c-0235-498e-aea9-f61be188ddf4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.063883] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b352ee-c006-479a-ae8e-f30106fd7268 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.077178] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.218638] env[61972]: DEBUG oslo_vmware.api [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389673, 'name': 
PowerOffVM_Task, 'duration_secs': 0.213903} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.218918] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1075.219111] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1075.219368] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdd43ccf-ca79-424e-bcba-c3d0543bb470 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.281166] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1075.281394] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1075.281582] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Deleting the datastore file [datastore1] d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1075.281881] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b5af098-ebf7-4d6f-9a49-97be5fbf4912 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.288051] env[61972]: DEBUG oslo_vmware.api [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1075.288051] env[61972]: value = "task-1389675" [ 1075.288051] env[61972]: _type = "Task" [ 1075.288051] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.296265] env[61972]: DEBUG oslo_vmware.api [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389675, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.580270] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1075.798022] env[61972]: DEBUG oslo_vmware.api [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151328} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.798235] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.798423] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.798607] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.798812] env[61972]: INFO nova.compute.manager [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: d2864436-05a3-421f-98fd-41df925727c6] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1075.799100] env[61972]: DEBUG oslo.service.loopingcall [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1075.799321] env[61972]: DEBUG nova.compute.manager [-] [instance: d2864436-05a3-421f-98fd-41df925727c6] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1075.799418] env[61972]: DEBUG nova.network.neutron [-] [instance: d2864436-05a3-421f-98fd-41df925727c6] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1076.085192] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1076.085449] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.134s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.085730] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.085874] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Cleaning up deleted instances {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11557}} [ 1076.264954] env[61972]: DEBUG nova.compute.manager [req-5248bf74-83ca-4be1-8613-d7e6e97111b5 req-c235411e-a1ed-4acc-aa0b-5ab687409e2b service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Received event network-vif-deleted-98807bc5-c5af-4bd9-ad5e-8c3043878d76 {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1076.265282] env[61972]: INFO nova.compute.manager [req-5248bf74-83ca-4be1-8613-d7e6e97111b5 req-c235411e-a1ed-4acc-aa0b-5ab687409e2b service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Neutron deleted interface 98807bc5-c5af-4bd9-ad5e-8c3043878d76; detaching it from the instance and deleting it from the info cache [ 1076.265390] env[61972]: DEBUG nova.network.neutron [req-5248bf74-83ca-4be1-8613-d7e6e97111b5 req-c235411e-a1ed-4acc-aa0b-5ab687409e2b service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.596451] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] There are 38 instances to clean {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11566}} [ 1076.596790] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 65da0898-7f48-4ebf-9627-ba4e2ac68447] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1076.740958] env[61972]: DEBUG nova.network.neutron [-] [instance: d2864436-05a3-421f-98fd-41df925727c6] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.768788] env[61972]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa7b4e92-a17d-40ef-9af9-6619c02300a0 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.778465] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7f5bfe-45cb-4660-b1f6-641d06e918d3 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.803352] env[61972]: DEBUG nova.compute.manager [req-5248bf74-83ca-4be1-8613-d7e6e97111b5 req-c235411e-a1ed-4acc-aa0b-5ab687409e2b service nova] [instance: d2864436-05a3-421f-98fd-41df925727c6] Detach interface failed, port_id=98807bc5-c5af-4bd9-ad5e-8c3043878d76, reason: Instance d2864436-05a3-421f-98fd-41df925727c6 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1077.099728] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: fe623f2c-1fd9-43f0-be96-29bb252e0171] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1077.244765] env[61972]: INFO nova.compute.manager [-] [instance: d2864436-05a3-421f-98fd-41df925727c6] Took 1.45 seconds to deallocate network for instance. [ 1077.602877] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 66d0dc08-e8e7-4bf5-884a-67f65e8e109d] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1077.751054] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.751287] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.751511] env[61972]: DEBUG nova.objects.instance [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'resources' on Instance uuid d2864436-05a3-421f-98fd-41df925727c6 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.105866] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 86d022e2-bd02-45cd-a9dd-362e912dd8e1] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1078.295677] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f96475a-07cf-47ae-92d8-f3b8d2ef63c9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.303186] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1d943c66-1e0d-4b80-bb69-f911f7cf79a7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.333295] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4619da78-eadd-4243-8d79-8aa5cd8ef2fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.340683] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea11293d-2795-4c47-a3ad-b0b5c3abc987 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.353288] env[61972]: DEBUG nova.compute.provider_tree [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.609478] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 91db79db-d83c-4473-87c8-9dff2f042500] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1078.856379] env[61972]: DEBUG nova.scheduler.client.report [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1079.083295] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Volume attach. 
Driver type: vmdk {{(pid=61972) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1079.083561] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294927', 'volume_id': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'name': 'volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '63821242-c34e-4ed1-8ed3-f7f445ffe322', 'attached_at': '', 'detached_at': '', 'volume_id': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'serial': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1079.084445] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61480778-d8d0-4d15-9fc3-ca68b527f1a8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.100496] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe89736a-51c4-40fa-887a-81f3b0abf22d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.117288] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: ff25c137-ba78-4807-bd64-f3075e81dd5d] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1079.126742] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a/volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1079.127270] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dce4d8b2-ae6b-43f0-a6b4-a13905e6e77e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.146804] env[61972]: DEBUG oslo_vmware.api [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1079.146804] env[61972]: value = "task-1389677" [ 1079.146804] env[61972]: _type = "Task" [ 1079.146804] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.154966] env[61972]: DEBUG oslo_vmware.api [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389677, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.360928] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.380619] env[61972]: INFO nova.scheduler.client.report [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Deleted allocations for instance d2864436-05a3-421f-98fd-41df925727c6 [ 1079.628308] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: f0565271-2276-4f18-813a-6f9338183480] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1079.657324] env[61972]: DEBUG oslo_vmware.api [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389677, 'name': ReconfigVM_Task, 'duration_secs': 0.323091} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.658198] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Reconfigured VM instance instance-00000066 to attach disk [datastore2] volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a/volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1079.662993] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-549c7dc9-e5a5-430e-ab4e-adf41a2bc1e8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.677557] env[61972]: DEBUG oslo_vmware.api [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1079.677557] env[61972]: value = "task-1389678" [ 1079.677557] env[61972]: _type = "Task" [ 1079.677557] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.686405] env[61972]: DEBUG oslo_vmware.api [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389678, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.889390] env[61972]: DEBUG oslo_concurrency.lockutils [None req-ce2a5d56-f892-4e66-b8b3-95831c323892 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "d2864436-05a3-421f-98fd-41df925727c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.700s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.131225] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: bf32c8b2-51b4-495a-b340-5dbabdf33137] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1080.188443] env[61972]: DEBUG oslo_vmware.api [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389678, 'name': ReconfigVM_Task, 'duration_secs': 0.272171} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.188801] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294927', 'volume_id': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'name': 'volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '63821242-c34e-4ed1-8ed3-f7f445ffe322', 'attached_at': '', 'detached_at': '', 'volume_id': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'serial': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1080.635194] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e0f1f580-76d3-4f15-9f41-9b7cd7cf6e4d] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1081.138196] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 8745c578-de46-4ade-bf08-f0bc9bb300d8] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1081.231967] env[61972]: DEBUG nova.objects.instance [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lazy-loading 'flavor' on Instance uuid 63821242-c34e-4ed1-8ed3-f7f445ffe322 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.641649] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: dda298ff-d2fa-4bf6-a6f0-746f2a8d69d7] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1081.736645] env[61972]: DEBUG oslo_concurrency.lockutils [None req-7f479c6a-f07a-4f29-b490-47349110101c tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock 
"63821242-c34e-4ed1-8ed3-f7f445ffe322" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.921712] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.921988] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.091174] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.091420] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.145056] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: f71d004b-5343-4ef3-8f37-8ff544c335a2] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1082.425313] env[61972]: INFO nova.compute.manager [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Detaching volume 4b08d8c7-ed8d-438e-af5f-934e47d03d2a [ 1082.463553] env[61972]: INFO nova.virt.block_device [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Attempting to driver detach volume 4b08d8c7-ed8d-438e-af5f-934e47d03d2a from mountpoint /dev/sdb [ 1082.463822] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Volume detach. 
Driver type: vmdk {{(pid=61972) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1082.463993] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294927', 'volume_id': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'name': 'volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '63821242-c34e-4ed1-8ed3-f7f445ffe322', 'attached_at': '', 'detached_at': '', 'volume_id': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'serial': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1082.465113] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47957c5e-1435-467d-b2b9-54cde2302fd5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.486444] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a998d43d-3d3d-4a57-8626-122ecd25c729 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.493211] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8edc780-240b-4cd8-ad53-8f2e944be422 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.512605] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eb9a6b-04a6-4def-82e0-ba7c590c5021 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.526521] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] The volume has not been displaced from its original location: [datastore2] volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a/volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a.vmdk. No consolidation needed. 
{{(pid=61972) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1082.531848] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1082.532174] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4efb0b46-957a-4bbc-943a-39cb4b3afc3a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.551311] env[61972]: DEBUG oslo_vmware.api [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1082.551311] env[61972]: value = "task-1389679" [ 1082.551311] env[61972]: _type = "Task" [ 1082.551311] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.559159] env[61972]: DEBUG oslo_vmware.api [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389679, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.594050] env[61972]: DEBUG nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Starting instance... {{(pid=61972) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1082.647769] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e8582450-36c2-4d6b-89ee-6fef324063c4] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1083.061056] env[61972]: DEBUG oslo_vmware.api [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389679, 'name': ReconfigVM_Task, 'duration_secs': 0.249518} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.061345] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1083.065953] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be04011a-87f8-4998-adab-637b7a08d289 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.081356] env[61972]: DEBUG oslo_vmware.api [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1083.081356] env[61972]: value = "task-1389680" [ 1083.081356] env[61972]: _type = "Task" [ 1083.081356] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.088896] env[61972]: DEBUG oslo_vmware.api [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389680, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.151403] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 65c02563-a348-4415-bb21-3d3711202838] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1083.238350] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.238622] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.242006] env[61972]: INFO nova.compute.claims [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.591620] env[61972]: DEBUG oslo_vmware.api [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389680, 'name': ReconfigVM_Task, 'duration_secs': 0.145227} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.591971] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294927', 'volume_id': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'name': 'volume-4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '63821242-c34e-4ed1-8ed3-f7f445ffe322', 'attached_at': '', 'detached_at': '', 'volume_id': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a', 'serial': '4b08d8c7-ed8d-438e-af5f-934e47d03d2a'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1083.654674] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: b03b1fe7-2eda-4505-a6f9-19c570b15d1e] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1084.130325] env[61972]: DEBUG nova.objects.instance [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lazy-loading 'flavor' on Instance uuid 63821242-c34e-4ed1-8ed3-f7f445ffe322 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.158073] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 12a1a1ee-9aa1-4dda-9276-68492718e404] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1084.289813] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f957d2-5373-4f85-928b-703d18175bcb {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.297863] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0381301-89f3-409a-9887-04d1cd807826 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.327455] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bee7b98-b1ee-4aed-b345-42c2f4fc54be {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.334505] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3e6ccf-9b6d-46c5-b3de-a2f3b69cf16e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.348479] env[61972]: DEBUG nova.compute.provider_tree [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.660721] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 84e07f61-2111-43cb-93a2-9cb47ac52503] Instance has had 0 of 5 cleanup attempts 
{{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1084.851872] env[61972]: DEBUG nova.scheduler.client.report [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1085.137067] env[61972]: DEBUG oslo_concurrency.lockutils [None req-9f71327d-f84c-4a76-b56c-7ef3fbb9daaf tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.215s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.164579] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: a4e65047-a892-4f18-8a14-0f5de25ce235] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1085.356795] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.118s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.357356] env[61972]: DEBUG nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Start building networks asynchronously for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1085.667035] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 1597e0f2-f67a-406e-9ef0-4d39b353ab0a] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1085.861757] env[61972]: DEBUG nova.compute.utils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1085.863102] env[61972]: DEBUG nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Allocating IP information in the background. 
{{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1085.863281] env[61972]: DEBUG nova.network.neutron [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] allocate_for_instance() {{(pid=61972) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1085.898710] env[61972]: DEBUG nova.policy [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3be17eae7273428782fef3d4aa7b7cce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c822f4d4b5a4575ba334521b1b9fbde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61972) authorize /opt/stack/nova/nova/policy.py:201}} [ 1086.147090] env[61972]: DEBUG nova.network.neutron [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Successfully created port: bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.158593] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.158593] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.158593] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "63821242-c34e-4ed1-8ed3-f7f445ffe322-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.158593] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.158785] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 
tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.159523] env[61972]: INFO nova.compute.manager [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Terminating instance [ 1086.169542] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: a0b243b9-04b2-4fac-8fb7-a9ff5685f9ee] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1086.366725] env[61972]: DEBUG nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Start building block device mappings for instance. {{(pid=61972) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1086.663045] env[61972]: DEBUG nova.compute.manager [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1086.663169] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.663988] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c6a57f-bb66-4a0a-9ee2-53b5b65a7808 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.671637] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 56e21cf4-4dbc-4f72-97c0-082dd689c046] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1086.673508] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.673933] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b655b046-9d61-4c98-8536-62564ce8a0d6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.680058] env[61972]: DEBUG oslo_vmware.api [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1086.680058] env[61972]: value = "task-1389681" [ 1086.680058] env[61972]: _type = "Task" [ 
1086.680058] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.688925] env[61972]: DEBUG oslo_vmware.api [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389681, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.174791] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 489f1de0-d1c8-4429-a6f1-24ea885282f3] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1087.189914] env[61972]: DEBUG oslo_vmware.api [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389681, 'name': PowerOffVM_Task, 'duration_secs': 0.201468} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.190195] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1087.190370] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1087.190623] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1731f6c4-5a93-48dd-867e-1c26610cd534 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.250839] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.251114] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Deleting contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.251355] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Deleting the datastore file [datastore2] 63821242-c34e-4ed1-8ed3-f7f445ffe322 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.251604] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3647c71c-e9c7-4e9d-889d-4a762784d50d {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.257735] env[61972]: DEBUG oslo_vmware.api [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for the task: (returnval){ [ 1087.257735] env[61972]: value = "task-1389683" [ 1087.257735] env[61972]: _type = "Task" [ 1087.257735] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.265082] env[61972]: DEBUG oslo_vmware.api [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389683, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.376236] env[61972]: DEBUG nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Start spawning the instance on the hypervisor. {{(pid=61972) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1087.403453] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T12:08:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T12:07:56Z,direct_url=,disk_format='vmdk',id=79227ea9-188c-426d-a7d8-cb14b658f493,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d3f24b94cb854f4b925e1be405c7df82',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T12:07:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1087.403718] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Flavor limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1087.403992] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Image limits 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.404075] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Flavor pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1087.404228] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Image pref 0:0:0 {{(pid=61972) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.404373] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61972) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1087.405283] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1087.405283] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1087.405283] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Got 1 possible topologies {{(pid=61972) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1087.405283] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1087.405283] env[61972]: DEBUG nova.virt.hardware [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61972) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1087.406155] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7faf4906-ab66-44c9-a20b-1fd4629db1fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.413440] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f81fdb-107e-4f91-9ee1-1022bbd855a1 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.508413] env[61972]: DEBUG nova.compute.manager [req-1bf63408-872e-46e3-bc23-83f917a3cfd3 req-8ed2f2df-7633-4d31-9ffa-0a6d81e819c0 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Received event network-vif-plugged-bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1087.508637] env[61972]: DEBUG oslo_concurrency.lockutils [req-1bf63408-872e-46e3-bc23-83f917a3cfd3 req-8ed2f2df-7633-4d31-9ffa-0a6d81e819c0 service nova] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.508873] env[61972]: DEBUG oslo_concurrency.lockutils 
[req-1bf63408-872e-46e3-bc23-83f917a3cfd3 req-8ed2f2df-7633-4d31-9ffa-0a6d81e819c0 service nova] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.509022] env[61972]: DEBUG oslo_concurrency.lockutils [req-1bf63408-872e-46e3-bc23-83f917a3cfd3 req-8ed2f2df-7633-4d31-9ffa-0a6d81e819c0 service nova] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.509190] env[61972]: DEBUG nova.compute.manager [req-1bf63408-872e-46e3-bc23-83f917a3cfd3 req-8ed2f2df-7633-4d31-9ffa-0a6d81e819c0 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] No waiting events found dispatching network-vif-plugged-bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1087.509355] env[61972]: WARNING nova.compute.manager [req-1bf63408-872e-46e3-bc23-83f917a3cfd3 req-8ed2f2df-7633-4d31-9ffa-0a6d81e819c0 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Received unexpected event network-vif-plugged-bec4a1b3-9a89-40d4-9d33-ea537d9ba51c for instance with vm_state building and task_state spawning. [ 1087.595413] env[61972]: DEBUG nova.network.neutron [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Successfully updated port: bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1087.678399] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 9a0463a0-dc96-41b1-8415-22011644ac0d] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1087.768497] env[61972]: DEBUG oslo_vmware.api [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Task: {'id': task-1389683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145687} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.768718] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.768878] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Deleted contents of the VM from datastore datastore2 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.769071] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.769254] env[61972]: INFO nova.compute.manager [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1087.769500] env[61972]: DEBUG oslo.service.loopingcall [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.769701] env[61972]: DEBUG nova.compute.manager [-] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1087.769798] env[61972]: DEBUG nova.network.neutron [-] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1088.100404] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.100558] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.100775] env[61972]: DEBUG nova.network.neutron [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Building network info cache for instance {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1088.181629] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: b9726bf4-a4b1-4b22-840f-98157d0d790c] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1088.639192] env[61972]: DEBUG nova.network.neutron [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Instance cache missing network info. 
{{(pid=61972) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1088.685083] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 72435dc4-eae1-4606-bb32-e7e8e282d0b9] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1088.738914] env[61972]: DEBUG nova.network.neutron [-] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.768422] env[61972]: DEBUG nova.network.neutron [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Updating instance_info_cache with network_info: [{"id": "bec4a1b3-9a89-40d4-9d33-ea537d9ba51c", "address": "fa:16:3e:7a:2a:28", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbec4a1b3-9a", "ovs_interfaceid": "bec4a1b3-9a89-40d4-9d33-ea537d9ba51c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.188941] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 942b00ba-a615-452d-a0c1-633d48d73fd4] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1089.241810] env[61972]: INFO nova.compute.manager [-] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Took 1.47 seconds to deallocate network for instance. 
[ 1089.270729] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Releasing lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.271055] env[61972]: DEBUG nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Instance network_info: |[{"id": "bec4a1b3-9a89-40d4-9d33-ea537d9ba51c", "address": "fa:16:3e:7a:2a:28", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbec4a1b3-9a", "ovs_interfaceid": "bec4a1b3-9a89-40d4-9d33-ea537d9ba51c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61972) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1089.271511] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:2a:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bec4a1b3-9a89-40d4-9d33-ea537d9ba51c', 'vif_model': 'vmxnet3'}] {{(pid=61972) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1089.279697] env[61972]: DEBUG oslo.service.loopingcall [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1089.280730] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Creating VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1089.280973] env[61972]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-431d7e04-914e-460d-9925-2e1021761f92 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.301999] env[61972]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1089.301999] env[61972]: value = "task-1389684" [ 1089.301999] env[61972]: _type = "Task" [ 1089.301999] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.312722] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389684, 'name': CreateVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.537901] env[61972]: DEBUG nova.compute.manager [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Received event network-changed-bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1089.538291] env[61972]: DEBUG nova.compute.manager [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Refreshing instance network info cache due to event network-changed-bec4a1b3-9a89-40d4-9d33-ea537d9ba51c. 
{{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1089.538540] env[61972]: DEBUG oslo_concurrency.lockutils [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] Acquiring lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.538690] env[61972]: DEBUG oslo_concurrency.lockutils [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] Acquired lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.538858] env[61972]: DEBUG nova.network.neutron [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Refreshing network info cache for port bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.693165] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 9562558a-89ba-4169-bd0a-ad31fc0c33bc] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1089.748344] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.748624] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.748845] env[61972]: DEBUG nova.objects.instance [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lazy-loading 'resources' on Instance uuid 63821242-c34e-4ed1-8ed3-f7f445ffe322 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.811190] env[61972]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389684, 'name': CreateVM_Task, 'duration_secs': 0.31717} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.811307] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Created VM on the ESX host {{(pid=61972) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1089.811963] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.812152] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.812467] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1089.812709] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4155970c-09a4-4ba9-b6ba-0b29cd45053a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.816878] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1089.816878] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b8916e-6ae1-57c4-ea93-849e9e6264a9" [ 1089.816878] env[61972]: _type = "Task" [ 1089.816878] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.824154] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b8916e-6ae1-57c4-ea93-849e9e6264a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.196045] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 21440243-458c-4640-b0ba-8f3b8b1b0720] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1090.215580] env[61972]: DEBUG nova.network.neutron [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Updated VIF entry in instance network info cache for port bec4a1b3-9a89-40d4-9d33-ea537d9ba51c. 
{{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.215921] env[61972]: DEBUG nova.network.neutron [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Updating instance_info_cache with network_info: [{"id": "bec4a1b3-9a89-40d4-9d33-ea537d9ba51c", "address": "fa:16:3e:7a:2a:28", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbec4a1b3-9a", "ovs_interfaceid": "bec4a1b3-9a89-40d4-9d33-ea537d9ba51c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.293174] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a15ee1-469c-4bac-841d-45930a54a246 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.300755] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1524390c-f862-4b32-9ed5-0233b4c63681 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.333541] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d6d10c-32a2-4011-80f2-b6a04a2a0b36 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.340716] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52b8916e-6ae1-57c4-ea93-849e9e6264a9, 'name': SearchDatastore_Task, 'duration_secs': 0.010128} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.342589] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.342833] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Processing image 79227ea9-188c-426d-a7d8-cb14b658f493 {{(pid=61972) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1090.343083] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.343241] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.343425] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1090.343689] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be4a35cb-ad0d-488d-9fa7-150cbdc36dd4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.346138] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193e882a-b3ee-4d40-abca-319e4de8db83 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.358718] env[61972]: DEBUG nova.compute.provider_tree [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.360646] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61972) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1090.360822] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 
tempest-AttachVolumeTestJSON-1233740891-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61972) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1090.361706] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b133e1c8-adf9-4190-ba2f-9803027e2105 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.366230] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1090.366230] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d382a0-21b5-b145-5a89-e3c68a3adc5d" [ 1090.366230] env[61972]: _type = "Task" [ 1090.366230] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.373978] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d382a0-21b5-b145-5a89-e3c68a3adc5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.698937] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 667aff7f-57d5-4133-934d-386602a866f8] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1090.719776] env[61972]: DEBUG oslo_concurrency.lockutils [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] Releasing lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.720059] env[61972]: DEBUG nova.compute.manager [req-799ac3c0-9dca-4ddf-9d39-4733aa5b881d req-642fd54e-f063-4b4a-8bc7-3245b7c50697 service nova] [instance: 63821242-c34e-4ed1-8ed3-f7f445ffe322] Received event network-vif-deleted-984fa809-bf48-4083-bd47-872fafdec46a {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1090.863552] env[61972]: DEBUG nova.scheduler.client.report [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1090.877023] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]52d382a0-21b5-b145-5a89-e3c68a3adc5d, 'name': SearchDatastore_Task, 'duration_secs': 0.008141} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.878285] env[61972]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bc85aa9-0ba5-43e8-91ed-370369b241ea {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.883801] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1090.883801] env[61972]: value = "session[52a9d73d-5959-3000-f45d-05308a20e7d5]5285b1f6-3fc0-1631-b52a-61c0ecbbae1d" [ 1090.883801] env[61972]: _type = "Task" [ 1090.883801] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.890975] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5285b1f6-3fc0-1631-b52a-61c0ecbbae1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.202329] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 94bd64b9-3d20-4631-baed-4500f9beb9c2] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1091.368057] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.619s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.389149] env[61972]: INFO nova.scheduler.client.report [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Deleted allocations for instance 63821242-c34e-4ed1-8ed3-f7f445ffe322 [ 1091.393618] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': session[52a9d73d-5959-3000-f45d-05308a20e7d5]5285b1f6-3fc0-1631-b52a-61c0ecbbae1d, 'name': SearchDatastore_Task, 'duration_secs': 0.0103} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.396039] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.396216] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 6a19486c-40c9-4210-a16f-22d9e8563cd9/6a19486c-40c9-4210-a16f-22d9e8563cd9.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1091.396635] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20870d67-96f9-432a-b0bb-bd10c05258fa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.403591] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1091.403591] env[61972]: value = "task-1389685" [ 1091.403591] env[61972]: _type = "Task" [ 1091.403591] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.412125] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389685, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.705494] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: e2b6dd4e-b639-4553-a45f-87c155506ea3] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1091.900118] env[61972]: DEBUG oslo_concurrency.lockutils [None req-5e1b2055-a677-4a41-98ef-817e500d2845 tempest-AttachVolumeNegativeTest-105532474 tempest-AttachVolumeNegativeTest-105532474-project-member] Lock "63821242-c34e-4ed1-8ed3-f7f445ffe322" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.743s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.913799] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389685, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460699} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.914158] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/79227ea9-188c-426d-a7d8-cb14b658f493/79227ea9-188c-426d-a7d8-cb14b658f493.vmdk to [datastore1] 6a19486c-40c9-4210-a16f-22d9e8563cd9/6a19486c-40c9-4210-a16f-22d9e8563cd9.vmdk {{(pid=61972) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1091.914415] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Extending root virtual disk to 1048576 {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1091.915294] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-239be3e1-b22b-4ee0-adb0-68e29cbfa5f6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.921872] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1091.921872] env[61972]: value = "task-1389687" [ 1091.921872] env[61972]: _type = "Task" [ 1091.921872] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.929656] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389687, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.209149] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 3d424523-b45d-4174-ac7a-08fd653e314f] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1092.432714] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389687, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0614} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.433019] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Extended root virtual disk {{(pid=61972) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1092.433821] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0709f7c3-75b0-476c-9502-52e89558f3f6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.455131] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 6a19486c-40c9-4210-a16f-22d9e8563cd9/6a19486c-40c9-4210-a16f-22d9e8563cd9.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1092.455416] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f2f09d2-6eb8-455c-98fb-4afb46769840 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.474434] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1092.474434] env[61972]: value = "task-1389688" [ 1092.474434] env[61972]: _type = "Task" [ 1092.474434] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.481732] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389688, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.715244] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: caad50a8-e0ad-4ca9-b391-691ead1756f0] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1092.983890] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389688, 'name': ReconfigVM_Task, 'duration_secs': 0.272716} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.984221] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 6a19486c-40c9-4210-a16f-22d9e8563cd9/6a19486c-40c9-4210-a16f-22d9e8563cd9.vmdk or device None with type sparse {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.984879] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e3ebed9-e280-4820-a5ed-400a18307462 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.991114] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1092.991114] env[61972]: value = "task-1389689" [ 1092.991114] env[61972]: _type = "Task" [ 1092.991114] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.998945] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389689, 'name': Rename_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.218842] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 89cbc6ec-7546-443c-9abb-47940d223daa] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1093.501155] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389689, 'name': Rename_Task, 'duration_secs': 0.134293} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.501442] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Powering on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.501679] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79013597-937a-4e1a-856e-dc3b12010ac9 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.507236] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1093.507236] env[61972]: value = "task-1389690" [ 1093.507236] env[61972]: _type = "Task" [ 1093.507236] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.515232] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389690, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.722373] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 1cd50cd6-ccb2-41aa-8c24-9eabed18de6b] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1094.017293] env[61972]: DEBUG oslo_vmware.api [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389690, 'name': PowerOnVM_Task, 'duration_secs': 0.411241} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.017577] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Powered on the VM {{(pid=61972) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.017794] env[61972]: INFO nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Took 6.64 seconds to spawn the instance on the hypervisor. [ 1094.017975] env[61972]: DEBUG nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Checking state {{(pid=61972) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1094.018753] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39367717-b3c0-4f9e-ad09-6d6a7031387a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.225199] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 0cd09167-2c2f-4cad-b26d-35aa208fbf79] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1094.534188] env[61972]: INFO nova.compute.manager [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Took 11.43 seconds to build instance. 
[ 1094.728025] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 036a2dfc-615d-410a-8a3f-32de621879c2] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1095.036054] env[61972]: DEBUG oslo_concurrency.lockutils [None req-be19728f-0330-402d-a8dd-c940d9518497 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.944s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.231133] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] [instance: 9fd9fc35-7105-4941-8e05-cf4e45bb5d29] Instance has had 0 of 5 cleanup attempts {{(pid=61972) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1095.480981] env[61972]: DEBUG nova.compute.manager [req-744cedaa-46f3-4843-bd82-8b12e39fe1ad req-09e8e182-0c26-478d-a440-0eedc1523b18 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Received event network-changed-bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1095.481247] env[61972]: DEBUG nova.compute.manager [req-744cedaa-46f3-4843-bd82-8b12e39fe1ad req-09e8e182-0c26-478d-a440-0eedc1523b18 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Refreshing instance network info cache due to event network-changed-bec4a1b3-9a89-40d4-9d33-ea537d9ba51c. {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1095.481445] env[61972]: DEBUG oslo_concurrency.lockutils [req-744cedaa-46f3-4843-bd82-8b12e39fe1ad req-09e8e182-0c26-478d-a440-0eedc1523b18 service nova] Acquiring lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.481591] env[61972]: DEBUG oslo_concurrency.lockutils [req-744cedaa-46f3-4843-bd82-8b12e39fe1ad req-09e8e182-0c26-478d-a440-0eedc1523b18 service nova] Acquired lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.481832] env[61972]: DEBUG nova.network.neutron [req-744cedaa-46f3-4843-bd82-8b12e39fe1ad req-09e8e182-0c26-478d-a440-0eedc1523b18 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Refreshing network info cache for port bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.734390] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.734688] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Cleaning up deleted instances with incomplete migration {{(pid=61972) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11595}} [ 1096.182070] env[61972]: DEBUG nova.network.neutron [req-744cedaa-46f3-4843-bd82-8b12e39fe1ad req-09e8e182-0c26-478d-a440-0eedc1523b18 service nova] [instance: 
6a19486c-40c9-4210-a16f-22d9e8563cd9] Updated VIF entry in instance network info cache for port bec4a1b3-9a89-40d4-9d33-ea537d9ba51c. {{(pid=61972) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1096.182478] env[61972]: DEBUG nova.network.neutron [req-744cedaa-46f3-4843-bd82-8b12e39fe1ad req-09e8e182-0c26-478d-a440-0eedc1523b18 service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Updating instance_info_cache with network_info: [{"id": "bec4a1b3-9a89-40d4-9d33-ea537d9ba51c", "address": "fa:16:3e:7a:2a:28", "network": {"id": "73ff2fc4-b3de-4d3f-8f52-1276930293b9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-668205254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c822f4d4b5a4575ba334521b1b9fbde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbec4a1b3-9a", "ovs_interfaceid": "bec4a1b3-9a89-40d4-9d33-ea537d9ba51c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.237197] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1096.685583] env[61972]: DEBUG oslo_concurrency.lockutils [req-744cedaa-46f3-4843-bd82-8b12e39fe1ad req-09e8e182-0c26-478d-a440-0eedc1523b18 service nova] Releasing lock "refresh_cache-6a19486c-40c9-4210-a16f-22d9e8563cd9" {{(pid=61972) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.972487] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1096.972794] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.477265] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.477406] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances 
{{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.477599] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.477738] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.477926] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.478028] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1114.272200] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._sync_power_states {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.777059] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Getting list of instances from cluster (obj){ [ 1114.777059] env[61972]: value = "domain-c8" [ 1114.777059] env[61972]: _type = "ClusterComputeResource" [ 1114.777059] env[61972]: } {{(pid=61972) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1114.777310] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83005d27-899a-44fe-9bfb-c5ea3c675654 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.787508] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Got total of 1 instances {{(pid=61972) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1114.787665] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Triggering sync for uuid 6a19486c-40c9-4210-a16f-22d9e8563cd9 {{(pid=61972) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1114.788038] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.788287] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.789095] env[61972]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd6a003-8941-476c-831a-7eaa88862f82 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.297450] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.509s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.136083] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.136472] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1132.639403] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Didn't find any instances for network info cache update. {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 1132.639668] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.143076] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.143354] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.143354] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.143516] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1133.144430] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fb23e2-02ae-4775-a55b-a82e03249319 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.152633] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c7e449-7b93-4276-a399-5d2f26f3857b {{(pid=61972) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.166615] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fed94b-46f3-4899-900e-794adead56d7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.172838] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9632492f-2791-4ceb-aa95-fcf59bb106fc {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.201893] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180780MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1133.202056] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.202247] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.152410] env[61972]: DEBUG oslo_concurrency.lockutils [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.152686] env[61972]: DEBUG oslo_concurrency.lockutils [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.224971] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Instance 6a19486c-40c9-4210-a16f-22d9e8563cd9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61972) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.225194] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1134.225374] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61972) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1134.251182] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19df8667-8d50-48c5-956a-5ce1ca2461a4 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.258964] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aee87a0-14eb-481f-9bc4-b5cfa0bad546 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.288455] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1708fb9e-e277-4af6-9dd4-fed57a837604 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.295237] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737c704d-7644-4351-878a-51cd112474ad {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.308936] env[61972]: DEBUG nova.compute.provider_tree [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.655382] env[61972]: DEBUG nova.compute.utils [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1134.811973] env[61972]: DEBUG nova.scheduler.client.report [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1135.158306] env[61972]: DEBUG oslo_concurrency.lockutils [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=61972) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.317414] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61972) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1135.317581] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.217061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.217061] env[61972]: DEBUG oslo_concurrency.lockutils [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.217520] env[61972]: INFO nova.compute.manager [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Attaching volume 048daf86-e8fc-4071-a985-cdaded9f5a7f to /dev/sdb [ 1136.271048] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ccaafd-986b-463e-94ce-a88496313639 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.277587] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0898bd-2006-44bc-afb2-bd4ea8394169 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.290930] env[61972]: DEBUG nova.virt.block_device [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Updating existing volume attachment record: bf6f002b-f23e-4949-b2df-aeed0f5067ee {{(pid=61972) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1136.297257] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.297491] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.297662] env[61972]: DEBUG oslo_service.periodic_task 
[None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.619302] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.619594] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1138.615129] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.619332] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.619760] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61972) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1140.832328] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Volume attach. 
Driver type: vmdk {{(pid=61972) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1140.832582] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294929', 'volume_id': '048daf86-e8fc-4071-a985-cdaded9f5a7f', 'name': 'volume-048daf86-e8fc-4071-a985-cdaded9f5a7f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a19486c-40c9-4210-a16f-22d9e8563cd9', 'attached_at': '', 'detached_at': '', 'volume_id': '048daf86-e8fc-4071-a985-cdaded9f5a7f', 'serial': '048daf86-e8fc-4071-a985-cdaded9f5a7f'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1140.833501] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492faa18-6d7f-42d4-bfc4-cf8380d6f347 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.849719] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20dae37-e137-40b0-af40-a2b482930edd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.873106] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-048daf86-e8fc-4071-a985-cdaded9f5a7f/volume-048daf86-e8fc-4071-a985-cdaded9f5a7f.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1140.873354] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50a36a8c-d33b-49ae-bde7-9544c00aca34 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.890447] env[61972]: DEBUG oslo_vmware.api [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1140.890447] env[61972]: value = "task-1389694" [ 1140.890447] env[61972]: _type = "Task" [ 1140.890447] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.897738] env[61972]: DEBUG oslo_vmware.api [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389694, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.402066] env[61972]: DEBUG oslo_vmware.api [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389694, 'name': ReconfigVM_Task, 'duration_secs': 0.337133} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.402350] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-048daf86-e8fc-4071-a985-cdaded9f5a7f/volume-048daf86-e8fc-4071-a985-cdaded9f5a7f.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1141.406853] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83741e69-a5b2-4073-b340-a185640a889e {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.421257] env[61972]: DEBUG oslo_vmware.api [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1141.421257] env[61972]: value = "task-1389695" [ 1141.421257] env[61972]: _type = "Task" [ 1141.421257] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.428422] env[61972]: DEBUG oslo_vmware.api [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389695, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.931691] env[61972]: DEBUG oslo_vmware.api [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389695, 'name': ReconfigVM_Task, 'duration_secs': 0.122721} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.932032] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294929', 'volume_id': '048daf86-e8fc-4071-a985-cdaded9f5a7f', 'name': 'volume-048daf86-e8fc-4071-a985-cdaded9f5a7f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a19486c-40c9-4210-a16f-22d9e8563cd9', 'attached_at': '', 'detached_at': '', 'volume_id': '048daf86-e8fc-4071-a985-cdaded9f5a7f', 'serial': '048daf86-e8fc-4071-a985-cdaded9f5a7f'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1142.968060] env[61972]: DEBUG nova.objects.instance [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid 6a19486c-40c9-4210-a16f-22d9e8563cd9 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.474351] env[61972]: DEBUG oslo_concurrency.lockutils [None req-190fcdc2-8b4e-4201-aac3-3e91054e0982 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.257s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.296325] env[61972]: DEBUG oslo_concurrency.lockutils [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.296605] env[61972]: DEBUG oslo_concurrency.lockutils [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.800632] env[61972]: DEBUG nova.compute.utils [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Using /dev/sd instead of None {{(pid=61972) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1145.304040] env[61972]: DEBUG oslo_concurrency.lockutils [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.356193] env[61972]: DEBUG oslo_concurrency.lockutils [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f 
tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.356595] env[61972]: DEBUG oslo_concurrency.lockutils [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.356759] env[61972]: INFO nova.compute.manager [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Attaching volume 714e98fe-219f-4783-b0f6-f7356c8f143c to /dev/sdc [ 1146.385808] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ca78d7-5506-4754-a93d-5e9ddc5b03f7 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.392834] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90e293a-2ee5-48fa-8daf-f632e766d099 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.405141] env[61972]: DEBUG nova.virt.block_device [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Updating existing volume attachment record: ba793f88-a007-4416-b964-27b18257ece9 {{(pid=61972) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1150.946962] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Volume attach. 
Driver type: vmdk {{(pid=61972) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1150.947256] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294930', 'volume_id': '714e98fe-219f-4783-b0f6-f7356c8f143c', 'name': 'volume-714e98fe-219f-4783-b0f6-f7356c8f143c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a19486c-40c9-4210-a16f-22d9e8563cd9', 'attached_at': '', 'detached_at': '', 'volume_id': '714e98fe-219f-4783-b0f6-f7356c8f143c', 'serial': '714e98fe-219f-4783-b0f6-f7356c8f143c'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1150.948200] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941580ac-1885-4809-bdbe-dc72870523e5 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.964963] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d17a8ce-5d42-4900-9201-f7d5a9acc4aa {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.992242] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-714e98fe-219f-4783-b0f6-f7356c8f143c/volume-714e98fe-219f-4783-b0f6-f7356c8f143c.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1150.992489] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af158f4b-fb24-4c7e-8664-9bde7d8708e8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.010485] env[61972]: DEBUG oslo_vmware.api [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1151.010485] env[61972]: value = "task-1389698" [ 1151.010485] env[61972]: _type = "Task" [ 1151.010485] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.020090] env[61972]: DEBUG oslo_vmware.api [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389698, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.519789] env[61972]: DEBUG oslo_vmware.api [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389698, 'name': ReconfigVM_Task, 'duration_secs': 0.31989} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.520085] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-714e98fe-219f-4783-b0f6-f7356c8f143c/volume-714e98fe-219f-4783-b0f6-f7356c8f143c.vmdk or device None with type thin {{(pid=61972) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1151.524594] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3f36a52-61b7-4853-82da-ec80d6ab2060 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.538995] env[61972]: DEBUG oslo_vmware.api [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1151.538995] env[61972]: value = "task-1389699" [ 1151.538995] env[61972]: _type = "Task" [ 1151.538995] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.548848] env[61972]: DEBUG oslo_vmware.api [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389699, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.048572] env[61972]: DEBUG oslo_vmware.api [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389699, 'name': ReconfigVM_Task, 'duration_secs': 0.140041} completed successfully. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.048878] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294930', 'volume_id': '714e98fe-219f-4783-b0f6-f7356c8f143c', 'name': 'volume-714e98fe-219f-4783-b0f6-f7356c8f143c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a19486c-40c9-4210-a16f-22d9e8563cd9', 'attached_at': '', 'detached_at': '', 'volume_id': '714e98fe-219f-4783-b0f6-f7356c8f143c', 'serial': '714e98fe-219f-4783-b0f6-f7356c8f143c'} {{(pid=61972) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1153.084753] env[61972]: DEBUG nova.objects.instance [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid 6a19486c-40c9-4210-a16f-22d9e8563cd9 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.591367] env[61972]: DEBUG oslo_concurrency.lockutils [None req-67024b50-9dbf-4bcf-95a8-f359b9b6cf1f tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.235s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.868185] env[61972]: DEBUG oslo_concurrency.lockutils [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.868452] env[61972]: DEBUG oslo_concurrency.lockutils [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.371614] env[61972]: INFO nova.compute.manager [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Detaching volume 048daf86-e8fc-4071-a985-cdaded9f5a7f [ 1154.402032] env[61972]: INFO nova.virt.block_device [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Attempting to driver detach volume 048daf86-e8fc-4071-a985-cdaded9f5a7f from mountpoint /dev/sdb [ 1154.402032] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Volume detach. 
Driver type: vmdk {{(pid=61972) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1154.402032] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294929', 'volume_id': '048daf86-e8fc-4071-a985-cdaded9f5a7f', 'name': 'volume-048daf86-e8fc-4071-a985-cdaded9f5a7f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a19486c-40c9-4210-a16f-22d9e8563cd9', 'attached_at': '', 'detached_at': '', 'volume_id': '048daf86-e8fc-4071-a985-cdaded9f5a7f', 'serial': '048daf86-e8fc-4071-a985-cdaded9f5a7f'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1154.402879] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3592aedc-6a03-4c4d-bfb8-8f98317b98ce {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.426905] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff898bb0-2d41-4619-b5d9-ae21f709966b {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.433827] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ec3fcc-e03b-4421-8812-f760793467f2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.455796] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d816a145-e4ca-4007-adf5-6cde2f319f50 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.469345] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] The volume has not been displaced from its original location: [datastore2] volume-048daf86-e8fc-4071-a985-cdaded9f5a7f/volume-048daf86-e8fc-4071-a985-cdaded9f5a7f.vmdk. No consolidation needed. 
{{(pid=61972) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1154.474442] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1154.474691] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d6df51a-3a74-4ab8-8773-6b2cb3d9f3a6 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.491576] env[61972]: DEBUG oslo_vmware.api [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1154.491576] env[61972]: value = "task-1389700" [ 1154.491576] env[61972]: _type = "Task" [ 1154.491576] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.498591] env[61972]: DEBUG oslo_vmware.api [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389700, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.000618] env[61972]: DEBUG oslo_vmware.api [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389700, 'name': ReconfigVM_Task, 'duration_secs': 0.200677} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.000907] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1155.005516] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c983594c-7fbc-4b55-8f41-80ba30a7accf {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.020313] env[61972]: DEBUG oslo_vmware.api [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1155.020313] env[61972]: value = "task-1389701" [ 1155.020313] env[61972]: _type = "Task" [ 1155.020313] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.029308] env[61972]: DEBUG oslo_vmware.api [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389701, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.529555] env[61972]: DEBUG oslo_vmware.api [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389701, 'name': ReconfigVM_Task, 'duration_secs': 0.128721} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.529929] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294929', 'volume_id': '048daf86-e8fc-4071-a985-cdaded9f5a7f', 'name': 'volume-048daf86-e8fc-4071-a985-cdaded9f5a7f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a19486c-40c9-4210-a16f-22d9e8563cd9', 'attached_at': '', 'detached_at': '', 'volume_id': '048daf86-e8fc-4071-a985-cdaded9f5a7f', 'serial': '048daf86-e8fc-4071-a985-cdaded9f5a7f'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1156.069819] env[61972]: DEBUG nova.objects.instance [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid 6a19486c-40c9-4210-a16f-22d9e8563cd9 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.077848] env[61972]: DEBUG oslo_concurrency.lockutils [None req-457c1735-c2fb-4f2e-be99-bdc06329bcf6 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.099741] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1157.099995] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1157.602556] env[61972]: INFO nova.compute.manager [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Detaching volume 714e98fe-219f-4783-b0f6-f7356c8f143c [ 1157.632031] env[61972]: INFO nova.virt.block_device [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 
6a19486c-40c9-4210-a16f-22d9e8563cd9] Attempting to driver detach volume 714e98fe-219f-4783-b0f6-f7356c8f143c from mountpoint /dev/sdc [ 1157.632287] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Volume detach. Driver type: vmdk {{(pid=61972) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1157.632476] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294930', 'volume_id': '714e98fe-219f-4783-b0f6-f7356c8f143c', 'name': 'volume-714e98fe-219f-4783-b0f6-f7356c8f143c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a19486c-40c9-4210-a16f-22d9e8563cd9', 'attached_at': '', 'detached_at': '', 'volume_id': '714e98fe-219f-4783-b0f6-f7356c8f143c', 'serial': '714e98fe-219f-4783-b0f6-f7356c8f143c'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1157.633408] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f60c08-2f0f-4f3d-ae70-0a058a64cb6a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.654293] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7228c894-94db-4a7e-a338-0ec7eab4b38a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.661041] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ba12da-45ac-4e20-a1a7-7b9380019d50 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.680250] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1caf2211-9fd0-4153-be06-957b6a9a9ed8 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.695281] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] The volume has not been displaced from its original location: [datastore2] volume-714e98fe-219f-4783-b0f6-f7356c8f143c/volume-714e98fe-219f-4783-b0f6-f7356c8f143c.vmdk. No consolidation needed. 
{{(pid=61972) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1157.700364] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfiguring VM instance instance-00000068 to detach disk 2002 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1157.700618] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-690f72fa-a3a2-49c2-a6e4-2db674af05c2 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.718032] env[61972]: DEBUG oslo_vmware.api [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1157.718032] env[61972]: value = "task-1389702" [ 1157.718032] env[61972]: _type = "Task" [ 1157.718032] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.725257] env[61972]: DEBUG oslo_vmware.api [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389702, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.227264] env[61972]: DEBUG oslo_vmware.api [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389702, 'name': ReconfigVM_Task, 'duration_secs': 0.228125} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.227605] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Reconfigured VM instance instance-00000068 to detach disk 2002 {{(pid=61972) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1158.232059] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eca76353-60c4-4d09-9dcc-4eb925275769 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.246957] env[61972]: DEBUG oslo_vmware.api [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1158.246957] env[61972]: value = "task-1389703" [ 1158.246957] env[61972]: _type = "Task" [ 1158.246957] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.254248] env[61972]: DEBUG oslo_vmware.api [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389703, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.757239] env[61972]: DEBUG oslo_vmware.api [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389703, 'name': ReconfigVM_Task, 'duration_secs': 0.132208} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.757553] env[61972]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294930', 'volume_id': '714e98fe-219f-4783-b0f6-f7356c8f143c', 'name': 'volume-714e98fe-219f-4783-b0f6-f7356c8f143c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6a19486c-40c9-4210-a16f-22d9e8563cd9', 'attached_at': '', 'detached_at': '', 'volume_id': '714e98fe-219f-4783-b0f6-f7356c8f143c', 'serial': '714e98fe-219f-4783-b0f6-f7356c8f143c'} {{(pid=61972) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1159.298445] env[61972]: DEBUG nova.objects.instance [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'flavor' on Instance uuid 6a19486c-40c9-4210-a16f-22d9e8563cd9 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.306265] env[61972]: DEBUG oslo_concurrency.lockutils [None req-4e059a32-7873-4507-8836-43e7243dddf1 tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.206s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.467057] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.467057] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.467057] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "6a19486c-40c9-4210-a16f-22d9e8563cd9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.467057] env[61972]: DEBUG 
oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.467520] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.469444] env[61972]: INFO nova.compute.manager [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Terminating instance [ 1161.973240] env[61972]: DEBUG nova.compute.manager [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Start destroying the instance on the hypervisor. {{(pid=61972) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1161.973480] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Destroying instance {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.974443] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd77ea4-6ad1-4b12-9d1a-b6223b80fd1a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.982434] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Powering off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.982677] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0448a054-df24-45bf-8397-150e2c276f49 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.989130] env[61972]: DEBUG oslo_vmware.api [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1161.989130] env[61972]: value = "task-1389704" [ 1161.989130] env[61972]: _type = "Task" [ 1161.989130] env[61972]: } to complete. {{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.996720] env[61972]: DEBUG oslo_vmware.api [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389704, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.499456] env[61972]: DEBUG oslo_vmware.api [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389704, 'name': PowerOffVM_Task, 'duration_secs': 0.235429} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.499850] env[61972]: DEBUG nova.virt.vmwareapi.vm_util [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Powered off the VM {{(pid=61972) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.499991] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Unregistering the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1162.500203] env[61972]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7f095d2-ab80-473b-8234-8b2e769a88bd {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.809037] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Unregistered the VM {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1162.809244] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Deleting contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1162.809399] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Deleting the datastore file [datastore1] 6a19486c-40c9-4210-a16f-22d9e8563cd9 {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1162.809672] env[61972]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1127192a-751b-42de-9e4a-4f2e189a9633 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.816976] env[61972]: DEBUG oslo_vmware.api [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for the task: (returnval){ [ 1162.816976] env[61972]: value = "task-1389706" [ 1162.816976] env[61972]: _type = "Task" [ 1162.816976] env[61972]: } to complete. 
{{(pid=61972) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.824102] env[61972]: DEBUG oslo_vmware.api [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389706, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.327054] env[61972]: DEBUG oslo_vmware.api [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Task: {'id': task-1389706, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156898} completed successfully. {{(pid=61972) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.327317] env[61972]: DEBUG nova.virt.vmwareapi.ds_util [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Deleted the datastore file {{(pid=61972) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1163.327505] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Deleted contents of the VM from datastore datastore1 {{(pid=61972) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1163.327695] env[61972]: DEBUG nova.virt.vmwareapi.vmops [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Instance destroyed {{(pid=61972) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1163.327892] env[61972]: INFO nova.compute.manager [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1163.328152] env[61972]: DEBUG oslo.service.loopingcall [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61972) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1163.328342] env[61972]: DEBUG nova.compute.manager [-] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Deallocating network for instance {{(pid=61972) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1163.328438] env[61972]: DEBUG nova.network.neutron [-] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] deallocate_for_instance() {{(pid=61972) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1163.762984] env[61972]: DEBUG nova.compute.manager [req-365f38b8-344d-4df3-85a5-6ae776484181 req-dc93f7d7-10f4-45e9-b32b-fe07508a307b service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Received event network-vif-deleted-bec4a1b3-9a89-40d4-9d33-ea537d9ba51c {{(pid=61972) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1163.763240] env[61972]: INFO nova.compute.manager [req-365f38b8-344d-4df3-85a5-6ae776484181 req-dc93f7d7-10f4-45e9-b32b-fe07508a307b service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Neutron deleted interface bec4a1b3-9a89-40d4-9d33-ea537d9ba51c; detaching it from the instance and deleting it from the info cache [ 1163.763407] env[61972]: DEBUG nova.network.neutron [req-365f38b8-344d-4df3-85a5-6ae776484181 req-dc93f7d7-10f4-45e9-b32b-fe07508a307b service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.214278] env[61972]: DEBUG nova.network.neutron [-] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Updating instance_info_cache with network_info: [] {{(pid=61972) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.266255] env[61972]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a93d51c7-fb2c-4fe6-afa7-1179e143a46d {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.276009] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69859f77-ced8-46fc-8e26-2e91ce6f0a3c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.299546] env[61972]: DEBUG nova.compute.manager [req-365f38b8-344d-4df3-85a5-6ae776484181 req-dc93f7d7-10f4-45e9-b32b-fe07508a307b service nova] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Detach interface failed, port_id=bec4a1b3-9a89-40d4-9d33-ea537d9ba51c, reason: Instance 6a19486c-40c9-4210-a16f-22d9e8563cd9 could not be found. {{(pid=61972) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1164.717738] env[61972]: INFO nova.compute.manager [-] [instance: 6a19486c-40c9-4210-a16f-22d9e8563cd9] Took 1.39 seconds to deallocate network for instance. 
[ 1165.224184] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.224546] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.224690] env[61972]: DEBUG nova.objects.instance [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lazy-loading 'resources' on Instance uuid 6a19486c-40c9-4210-a16f-22d9e8563cd9 {{(pid=61972) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.759918] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f97b45-5e63-42dd-b795-b0b7d3ae638c {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.767018] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595353ff-86c4-4070-80e1-69b9c2b5a281 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.798178] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77df45d9-537d-4758-8059-84c20865bc85 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.805324] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b010dcc2-126e-4c77-aa31-3ab73be34f84 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.818149] env[61972]: DEBUG nova.compute.provider_tree [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Inventory has not changed in ProviderTree for provider: 2f34b92c-91e8-4983-ae34-7426fcec3157 {{(pid=61972) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.321642] env[61972]: DEBUG nova.scheduler.client.report [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Inventory has not changed for provider 2f34b92c-91e8-4983-ae34-7426fcec3157 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61972) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1166.827323] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 
tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.603s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.848270] env[61972]: INFO nova.scheduler.client.report [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Deleted allocations for instance 6a19486c-40c9-4210-a16f-22d9e8563cd9 [ 1167.357458] env[61972]: DEBUG oslo_concurrency.lockutils [None req-c740149e-2963-410b-8d04-2031099b90ac tempest-AttachVolumeTestJSON-1233740891 tempest-AttachVolumeTestJSON-1233740891-project-member] Lock "6a19486c-40c9-4210-a16f-22d9e8563cd9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.891s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.620755] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.621145] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Starting heal instance info cache {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1191.621145] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Rebuilding the list of instances to heal {{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 1192.123545] env[61972]: DEBUG nova.compute.manager [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Didn't find any instances for network info cache update. 
{{(pid=61972) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 1194.619626] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.619951] env[61972]: DEBUG oslo_service.periodic_task [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Running periodic task ComputeManager.update_available_resource {{(pid=61972) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.122636] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.122894] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.123082] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.123240] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61972) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1195.124170] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075f03b7-76bb-44e6-842b-26898e080896 {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.133545] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd8cadb-79b0-4ccb-b96d-4b31864731ae {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.147380] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f27de0-5aa9-4b14-a8ec-3a9b0bca23fe {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.153470] env[61972]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812d59c7-5f61-4b95-9281-b250928be88a {{(pid=61972) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.180908] env[61972]: DEBUG nova.compute.resource_tracker [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181154MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61972) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1195.181052] env[61972]: DEBUG 
oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.181242] env[61972]: DEBUG oslo_concurrency.lockutils [None req-192eed6a-4960-4550-bd33-9df825dcafaa None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61972) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}